Roll V8 back to 3.6

Roll back to V8 3.6 to fix the x86 build; we don't have ucontext.h.

This reverts commits:
5d4cdbf7a67d3662fa0bee4efdb7edd8daec9b0b
c7cc028aaeedbbfa11c11d0b7b243b3d9e837ed9
592a9fc1d8ea420377a2e7efd0600e20b058be2b

Bug: 5688872
Change-Id: Ic961bb5e65b778e98bbfb71cce71d99fa949e995
diff --git a/tools/bash-completion.sh b/tools/bash-completion.sh
deleted file mode 100644
index 9f65c67..0000000
--- a/tools/bash-completion.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/bash
-# Copyright 2012 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-#       notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-#       copyright notice, this list of conditions and the following
-#       disclaimer in the documentation and/or other materials provided
-#       with the distribution.
-#     * Neither the name of Google Inc. nor the names of its
-#       contributors may be used to endorse or promote products derived
-#       from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Inspired by and based on:
-# http://src.chromium.org/viewvc/chrome/trunk/src/tools/bash-completion
-
-# Flag completion rule for bash.
-# To load in your shell, "source path/to/this/file".
-
-v8_source=$(readlink -f $(dirname $BASH_SOURCE)/..)
-
-_v8_flag() {
-  local cur defines targets
-  cur="${COMP_WORDS[COMP_CWORD]}"
-  defines=$(cat src/flag-definitions.h \
-    | grep "^DEFINE" \
-    | grep -v "DEFINE_implication" \
-    | sed -e 's/_/-/g')
-  targets=$(echo "$defines" \
-    | sed -ne 's/^DEFINE-[^(]*(\([^,]*\).*/--\1/p'; \
-    echo "$defines" \
-    | sed -ne 's/^DEFINE-bool(\([^,]*\).*/--no\1/p'; \
-    cat src/d8.cc \
-    | grep "strcmp(argv\[i\]" \
-    | sed -ne 's/^[^"]*"--\([^"]*\)".*/--\1/p')
-  COMPREPLY=($(compgen -W "$targets" -- "$cur"))
-  return 0
-}
-
-complete -F _v8_flag -f d8
diff --git a/tools/check-static-initializers.sh b/tools/check-static-initializers.sh
deleted file mode 100644
index 18add3a..0000000
--- a/tools/check-static-initializers.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-# Copyright 2012 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-#       notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-#       copyright notice, this list of conditions and the following
-#       disclaimer in the documentation and/or other materials provided
-#       with the distribution.
-#     * Neither the name of Google Inc. nor the names of its
-#       contributors may be used to endorse or promote products derived
-#       from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Checks that the number of compilation units having at least one static
-# initializer in d8 matches the one defined below.
-# Note that the project must be built with SCons before running this script.
-
-# Allow:
-#  - _GLOBAL__I__ZN2v88internal32AtomicOps_Internalx86CPUFeaturesE
-#  - _GLOBAL__I__ZN2v810LineEditor6first_E
-expected_static_init_count=2
-
-v8_root=$(readlink -f $(dirname $BASH_SOURCE)/../)
-d8="${v8_root}/d8"
-
-if [ ! -f "$d8" ]; then
-  echo "Please build the project with SCons."
-  exit 1
-fi
-
-static_inits=$(nm "$d8" | grep _GLOBAL__I | awk '{ print $NF; }')
-
-static_init_count=$(echo "$static_inits" | wc -l)
-
-if [ $static_init_count -gt $expected_static_init_count ]; then
-  echo "Too many static initializers."
-  echo "$static_inits"
-  exit 1
-fi
diff --git a/tools/common-includes.sh b/tools/common-includes.sh
deleted file mode 100644
index 8f0e78b..0000000
--- a/tools/common-includes.sh
+++ /dev/null
@@ -1,197 +0,0 @@
-# Copyright 2012 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-#       notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-#       copyright notice, this list of conditions and the following
-#       disclaimer in the documentation and/or other materials provided
-#       with the distribution.
-#     * Neither the name of Google Inc. nor the names of its
-#       contributors may be used to endorse or promote products derived
-#       from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# This file contains common function definitions for various other shell
-# scripts in this directory. It is not meant to be executed by itself.
-
-# Important: before including this file, the following variables must be set:
-# - BRANCHNAME
-# - PERSISTFILE_BASENAME
-
-TEMP_BRANCH=$BRANCHNAME-temporary-branch-created-by-script
-VERSION_FILE="src/version.cc"
-CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
-PATCH_FILE="$PERSISTFILE_BASENAME-patch"
-PATCH_OUTPUT_FILE="$PERSISTFILE_BASENAME-patch-output"
-COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
-TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
-TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision"
-START_STEP=0
-CURRENT_STEP=0
-
-die() {
-  [[ -n "$1" ]] && echo "Error: $1"
-  echo "Exiting."
-  exit 1
-}
-
-confirm() {
-  echo -n "$1 [Y/n] "
-  read ANSWER
-  if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
-    return 0
-  else
-    return 1
-  fi
-}
-
-delete_branch() {
-  local MATCH=$(git branch | grep "$1" | awk '{print $NF}' | grep -x $1)
-  if [ "$MATCH" == "$1" ] ; then
-    confirm "Branch $1 exists, do you want to delete it?"
-    if [ $? -eq 0 ] ; then
-      git branch -D $1 || die "Deleting branch '$1' failed."
-      echo "Branch $1 deleted."
-    else
-      die "Can't continue. Please delete branch $1 and try again."
-    fi
-  fi
-}
-
-# Persist and restore variables to support canceling/resuming execution
-# of this script.
-persist() {
-  local VARNAME=$1
-  local FILE="$PERSISTFILE_BASENAME-$VARNAME"
-  echo "${!VARNAME}" > $FILE
-}
-
-restore() {
-  local VARNAME=$1
-  local FILE="$PERSISTFILE_BASENAME-$VARNAME"
-  local VALUE="$(cat $FILE)"
-  eval "$VARNAME=\"$VALUE\""
-}
-
-restore_if_unset() {
-  local VARNAME=$1
-  [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
-  [[ -z "${!VARNAME}" ]] && die "Variable '$VARNAME' could not be restored."
-}
-
-initial_environment_checks() {
-  # Cancel if this is not a git checkout.
-  [[ -d .git ]] \
-    || die "This is not a git checkout, this script won't work for you."
-
-  # Cancel if EDITOR is unset or not executable.
-  [[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
-    || die "Please set your EDITOR environment variable, you'll need it."
-}
-
-common_prepare() {
-  # Check for a clean workdir.
-  [[ -z "$(git status -s -uno)" ]] \
-    || die "Workspace is not clean. Please commit or undo your changes."
-
-  # Persist current branch.
-  CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
-  persist "CURRENT_BRANCH"
-
-  # Fetch unfetched revisions.
-  git svn fetch || die "'git svn fetch' failed."
-
-  # Get ahold of a safe temporary branch and check it out.
-  if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then
-    delete_branch $TEMP_BRANCH
-    git checkout -b $TEMP_BRANCH
-  fi
-
-  # Delete the branch that will be created later if it exists already.
-  delete_branch $BRANCHNAME
-}
-
-common_cleanup() {
-  restore_if_unset "CURRENT_BRANCH"
-  git checkout -f $CURRENT_BRANCH
-  [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH
-  [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
-  # Clean up all temporary files.
-  rm -f "$PERSISTFILE_BASENAME"*
-}
-
-# These two functions take a prefix for the variable names as first argument.
-read_and_persist_version() {
-  for v in MAJOR_VERSION MINOR_VERSION BUILD_NUMBER PATCH_LEVEL; do
-    VARNAME="$1${v%%_*}"
-    VALUE=$(grep "#define $v" "$VERSION_FILE" | awk '{print $NF}')
-    eval "$VARNAME=\"$VALUE\""
-    persist "$VARNAME"
-  done
-}
-restore_version_if_unset() {
-  for v in MAJOR MINOR BUILD PATCH; do
-    restore_if_unset "$1$v"
-  done
-}
-
-upload_step() {
-  let CURRENT_STEP+=1
-  if [ $START_STEP -le $CURRENT_STEP ] ; then
-    echo ">>> Step $CURRENT_STEP: Upload for code review."
-    echo -n "Please enter the email address of a V8 reviewer for your patch: "
-    read REVIEWER
-    git cl upload -r "$REVIEWER" --send-mail \
-      || die "'git cl upload' failed, please try again."
-  fi
-}
-
-wait_for_lgtm() {
-  echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
-change. (If you need to iterate on the patch or double check that it's \
-sane, do so in another shell, but remember to not change the headline of \
-the uploaded CL."
-  unset ANSWER
-  while [ "$ANSWER" != "LGTM" ] ; do
-    [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
-    echo -n "> "
-    read ANSWER
-  done
-}
-
-# Takes a file containing the patch to apply as first argument.
-apply_patch() {
-  patch -p1 < "$1" > "$PATCH_OUTPUT_FILE" || \
-    { cat "$PATCH_OUTPUT_FILE" && die "Applying the patch failed."; }
-  tee < "$PATCH_OUTPUT_FILE" >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
-  rm "$PATCH_OUTPUT_FILE"
-}
-
-stage_files() {
-  # Stage added and modified files.
-  TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
-  for FILE in $TOUCHED_FILES ; do
-    git add "$FILE"
-  done
-  # Stage deleted files.
-  DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
-                                                 | awk '{print $NF}')
-  for FILE in $DELETED_FILES ; do
-    git rm "$FILE"
-  done
-  rm -f "$TOUCHED_FILES_FILE"
-}
diff --git a/tools/disasm.py b/tools/disasm.py
index 681b425..c326382 100644
--- a/tools/disasm.py
+++ b/tools/disasm.py
@@ -48,8 +48,7 @@
 _ARCH_MAP = {
   "ia32": "-m i386",
   "x64": "-m i386 -M x86-64",
-  "arm": "-m arm",  # Not supported by our objdump build.
-  "mips": "-m mips"  # Not supported by our objdump build.
+  "arm": "-m arm"  # Not supported by our objdump build.
 }
 
 
diff --git a/tools/gc-nvp-trace-processor.py b/tools/gc-nvp-trace-processor.py
index fe5a7f3..511ab2b 100755
--- a/tools/gc-nvp-trace-processor.py
+++ b/tools/gc-nvp-trace-processor.py
@@ -219,17 +219,13 @@
   if r['gc'] == 's':
     # there is no 'other' scope for scavenging collections.
     return 0
-  return r['pause'] - r['mark'] - r['sweep'] - r['external']
+  return r['pause'] - r['mark'] - r['sweep'] - r['compact'] - r['external']
 
 def scavenge_scope(r):
   if r['gc'] == 's':
     return r['pause'] - r['external']
   return 0
 
-
-def real_mutator(r):
-  return r['mutator'] - r['stepstook']
-
 plots = [
   [
     Set('style fill solid 0.5 noborder'),
@@ -238,24 +234,9 @@
     Plot(Item('Scavenge', scavenge_scope, lc = 'green'),
          Item('Marking', 'mark', lc = 'purple'),
          Item('Sweep', 'sweep', lc = 'blue'),
+         Item('Compaction', 'compact', lc = 'red'),
          Item('External', 'external', lc = '#489D43'),
-         Item('Other', other_scope, lc = 'grey'),
-         Item('IGC Steps', 'stepstook', lc = '#FF6347'))
-  ],
-  [
-    Set('style fill solid 0.5 noborder'),
-    Set('style histogram rowstacked'),
-    Set('style data histograms'),
-    Plot(Item('Scavenge', scavenge_scope, lc = 'green'),
-         Item('Marking', 'mark', lc = 'purple'),
-         Item('Sweep', 'sweep', lc = 'blue'),
-         Item('External', 'external', lc = '#489D43'),
-         Item('Other', other_scope, lc = '#ADD8E6'),
-         Item('External', 'external', lc = '#D3D3D3'))
-  ],
-
-  [
-    Plot(Item('Mutator', real_mutator, lc = 'black', style = 'lines'))
+         Item('Other', other_scope, lc = 'grey'))
   ],
   [
     Set('style histogram rowstacked'),
@@ -294,7 +275,7 @@
   return reduce(lambda t,r: f(t, r[field]), trace, init)
 
 def calc_total(trace, field):
-  return freduce(lambda t,v: t + long(v), field, trace, long(0))
+  return freduce(lambda t,v: t + v, field, trace, 0)
 
 def calc_max(trace, field):
   return freduce(lambda t,r: max(t, r), field, trace, 0)
@@ -307,9 +288,8 @@
   trace = parse_gc_trace(filename)
 
   marksweeps = filter(lambda r: r['gc'] == 'ms', trace)
+  markcompacts = filter(lambda r: r['gc'] == 'mc', trace)
   scavenges = filter(lambda r: r['gc'] == 's', trace)
-  globalgcs = filter(lambda r: r['gc'] != 's', trace)
-
 
   charts = plot_all(plots, trace, filename)
 
@@ -322,7 +302,7 @@
     else:
       avg = 0
     if n > 1:
-      dev = math.sqrt(freduce(lambda t,r: t + (r - avg) ** 2, field, trace, 0) /
+      dev = math.sqrt(freduce(lambda t,r: (r - avg) ** 2, field, trace, 0) /
                       (n - 1))
     else:
       dev = 0
@@ -331,31 +311,6 @@
               '<td>%d</td><td>%d [dev %f]</td></tr>' %
               (prefix, n, total, max, avg, dev))
 
-  def HumanReadable(size):
-    suffixes = ['bytes', 'kB', 'MB', 'GB']
-    power = 1
-    for i in range(len(suffixes)):
-      if size < power*1024:
-        return "%.1f" % (float(size) / power) + " " + suffixes[i]
-      power *= 1024
-
-  def throughput(name, trace):
-    total_live_after = calc_total(trace, 'total_size_after')
-    total_live_before = calc_total(trace, 'total_size_before')
-    total_gc = calc_total(trace, 'pause')
-    if total_gc == 0:
-      return
-    out.write('GC %s Throughput (after): %s / %s ms = %s/ms<br/>' %
-              (name,
-               HumanReadable(total_live_after),
-               total_gc,
-               HumanReadable(total_live_after / total_gc)))
-    out.write('GC %s Throughput (before): %s / %s ms = %s/ms<br/>' %
-              (name,
-               HumanReadable(total_live_before),
-               total_gc,
-               HumanReadable(total_live_before / total_gc)))
-
 
   with open(filename + '.html', 'w') as out:
     out.write('<html><body>')
@@ -365,17 +320,15 @@
     stats(out, 'Total in GC', trace, 'pause')
     stats(out, 'Scavenge', scavenges, 'pause')
     stats(out, 'MarkSweep', marksweeps, 'pause')
+    stats(out, 'MarkCompact', markcompacts, 'pause')
     stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark')
     stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep')
+    stats(out, 'Compact', filter(lambda r: r['compact'] != 0, trace), 'compact')
     stats(out,
           'External',
           filter(lambda r: r['external'] != 0, trace),
           'external')
     out.write('</table>')
-    throughput('TOTAL', trace)
-    throughput('MS', marksweeps)
-    throughput('OLDSPACE', globalgcs)
-    out.write('<br/>')
     for chart in charts:
       out.write('<img src="%s">' % chart)
       out.write('</body></html>')
diff --git a/tools/gcmole/gccause.lua b/tools/gcmole/gccause.lua
index b989176..a6fe542 100644
--- a/tools/gcmole/gccause.lua
+++ b/tools/gcmole/gccause.lua
@@ -48,8 +48,6 @@
 	    T[f] = true
 	    TrackCause(f, (lvl or 0) + 1)
 	 end
-
-	 if f == '<GC>' then break end
       end
    end
 end
diff --git a/tools/gcmole/gcmole.cc b/tools/gcmole/gcmole.cc
index 38ee6e0..71ba24a 100644
--- a/tools/gcmole/gcmole.cc
+++ b/tools/gcmole/gcmole.cc
@@ -69,21 +69,6 @@
 }
 
 
-static std::string EXTERNAL("EXTERNAL");
-static std::string STATE_TAG("enum v8::internal::StateTag");
-
-static bool IsExternalVMState(const clang::ValueDecl* var) {
-  const clang::EnumConstantDecl* enum_constant =
-      dyn_cast<clang::EnumConstantDecl>(var);
-  if (enum_constant != NULL && enum_constant->getNameAsString() == EXTERNAL) {
-    clang::QualType type = enum_constant->getType();
-    return (type.getAsString() == STATE_TAG);
-  }
-
-  return false;
-}
-
-
 struct Resolver {
   explicit Resolver(clang::ASTContext& ctx)
       : ctx_(ctx), decl_ctx_(ctx.getTranslationUnitDecl()) {
@@ -136,13 +121,6 @@
     return true;
   }
 
-  virtual bool VisitDeclRefExpr(clang::DeclRefExpr* expr) {
-    // If function mentions EXTERNAL VMState add artificial garbage collection
-    // mark.
-    if (IsExternalVMState(expr->getDecl())) AddCallee("CollectGarbage");
-    return true;
-  }
-
   void AnalyzeFunction(const clang::FunctionDecl* f) {
     MangledName name;
     if (InV8Namespace(f) && GetMangledName(ctx_, f, &name)) {
@@ -300,10 +278,6 @@
     return reinterpret_cast<Environment*>(effect_ & ~kAllEffects);
   }
 
-  static ExprEffect GC() {
-    return ExprEffect(kCausesGC, NULL);
-  }
-
  private:
   ExprEffect(int effect, Environment* env)
       : effect_((effect & kAllEffects) |
@@ -816,9 +790,6 @@
   ExprEffect Use(const clang::Expr* parent,
                  const clang::ValueDecl* var,
                  const Environment& env) {
-    if (IsExternalVMState(var)) {
-      return ExprEffect::GC();
-    }
     return Use(parent, var->getType(), var->getNameAsString(), env);
   }
 
diff --git a/tools/gcmole/gcmole.lua b/tools/gcmole/gcmole.lua
index 09db547..f8d3b62 100644
--- a/tools/gcmole/gcmole.lua
+++ b/tools/gcmole/gcmole.lua
@@ -106,6 +106,7 @@
                                          cfg.plugin_args,
                                          cfg.triple,
                                          cfg.arch_define)
+
    for _, filename in ipairs(filenames) do
       log("-- %s", filename)
       local action = cmd_line .. " src/" .. filename .. " 2>&1"
@@ -217,13 +218,7 @@
    --      Callsites of such functions are safe as long as they are properly 
    --      check return value and propagate the Failure to the caller.
    --      It should be possible to extend GCMole to understand this.
-   "Heap.*AllocateFunctionPrototype",
-
-   -- Ignore all StateTag methods.
-   "StateTag",
-
-   -- Ignore printing of elements transition.
-   "PrintElementsTransition"
+   "Heap.*AllocateFunctionPrototype"
 };
 
 local function AddCause(name, cause)
diff --git a/tools/gen-postmortem-metadata.py b/tools/gen-postmortem-metadata.py
deleted file mode 100644
index b9b1625..0000000
--- a/tools/gen-postmortem-metadata.py
+++ /dev/null
@@ -1,481 +0,0 @@
-#!/usr/bin/env python
-
-#
-# Copyright 2012 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-#       notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-#       copyright notice, this list of conditions and the following
-#       disclaimer in the documentation and/or other materials provided
-#       with the distribution.
-#     * Neither the name of Google Inc. nor the names of its
-#       contributors may be used to endorse or promote products derived
-#       from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-
-#
-# Emits a C++ file to be compiled and linked into libv8 to support postmortem
-# debugging tools.  Most importantly, this tool emits constants describing V8
-# internals:
-#
-#    v8dbg_type_CLASS__TYPE = VALUE             Describes class type values
-#    v8dbg_class_CLASS__FIELD__TYPE = OFFSET    Describes class fields
-#    v8dbg_parent_CLASS__PARENT                 Describes class hierarchy
-#    v8dbg_frametype_NAME = VALUE               Describes stack frame values
-#    v8dbg_off_fp_NAME = OFFSET                 Frame pointer offsets
-#    v8dbg_prop_NAME = OFFSET                   Object property offsets
-#    v8dbg_NAME = VALUE                         Miscellaneous values
-#
-# These constants are declared as global integers so that they'll be present in
-# the generated libv8 binary.
-#
-
-import re
-import sys
-
-#
-# Miscellaneous constants, tags, and masks used for object identification.
-#
-consts_misc = [
-    { 'name': 'FirstNonstringType',     'value': 'FIRST_NONSTRING_TYPE' },
-
-    { 'name': 'IsNotStringMask',        'value': 'kIsNotStringMask' },
-    { 'name': 'StringTag',              'value': 'kStringTag' },
-    { 'name': 'NotStringTag',           'value': 'kNotStringTag' },
-
-    { 'name': 'StringEncodingMask',     'value': 'kStringEncodingMask' },
-    { 'name': 'TwoByteStringTag',       'value': 'kTwoByteStringTag' },
-    { 'name': 'AsciiStringTag',         'value': 'kAsciiStringTag' },
-
-    { 'name': 'StringRepresentationMask',
-        'value': 'kStringRepresentationMask' },
-    { 'name': 'SeqStringTag',           'value': 'kSeqStringTag' },
-    { 'name': 'ConsStringTag',          'value': 'kConsStringTag' },
-    { 'name': 'ExternalStringTag',      'value': 'kExternalStringTag' },
-
-    { 'name': 'FailureTag',             'value': 'kFailureTag' },
-    { 'name': 'FailureTagMask',         'value': 'kFailureTagMask' },
-    { 'name': 'HeapObjectTag',          'value': 'kHeapObjectTag' },
-    { 'name': 'HeapObjectTagMask',      'value': 'kHeapObjectTagMask' },
-    { 'name': 'SmiTag',                 'value': 'kSmiTag' },
-    { 'name': 'SmiTagMask',             'value': 'kSmiTagMask' },
-    { 'name': 'SmiValueShift',          'value': 'kSmiTagSize' },
-    { 'name': 'PointerSizeLog2',        'value': 'kPointerSizeLog2' },
-
-    { 'name': 'prop_idx_content',
-        'value': 'DescriptorArray::kContentArrayIndex' },
-    { 'name': 'prop_idx_first',
-        'value': 'DescriptorArray::kFirstIndex' },
-    { 'name': 'prop_type_field',
-        'value': 'FIELD' },
-    { 'name': 'prop_type_first_phantom',
-        'value': 'MAP_TRANSITION' },
-    { 'name': 'prop_type_mask',
-        'value': 'PropertyDetails::TypeField::kMask' },
-
-    { 'name': 'off_fp_context',
-        'value': 'StandardFrameConstants::kContextOffset' },
-    { 'name': 'off_fp_marker',
-        'value': 'StandardFrameConstants::kMarkerOffset' },
-    { 'name': 'off_fp_function',
-        'value': 'JavaScriptFrameConstants::kFunctionOffset' },
-    { 'name': 'off_fp_args',
-        'value': 'JavaScriptFrameConstants::kLastParameterOffset' },
-];
-
-#
-# The following useful fields are missing accessors, so we define fake ones.
-#
-extras_accessors = [
-    'HeapObject, map, Map, kMapOffset',
-    'JSObject, elements, Object, kElementsOffset',
-    'FixedArray, data, uintptr_t, kHeaderSize',
-    'Map, instance_attributes, int, kInstanceAttributesOffset',
-    'Map, instance_descriptors, int, kInstanceDescriptorsOrBitField3Offset',
-    'Map, inobject_properties, int, kInObjectPropertiesOffset',
-    'Map, instance_size, int, kInstanceSizeOffset',
-    'HeapNumber, value, double, kValueOffset',
-    'ConsString, first, String, kFirstOffset',
-    'ConsString, second, String, kSecondOffset',
-    'ExternalString, resource, Object, kResourceOffset',
-    'SeqAsciiString, chars, char, kHeaderSize',
-    'SharedFunctionInfo, code, Code, kCodeOffset',
-    'Code, instruction_start, uintptr_t, kHeaderSize',
-    'Code, instruction_size, int, kInstructionSizeOffset',
-];
-
-#
-# The following is a whitelist of classes we expect to find when scanning the
-# source code. This list is not exhaustive, but it's still useful to identify
-# when this script gets out of sync with the source. See load_objects().
-#
-expected_classes = [
-    'ConsString', 'FixedArray', 'HeapNumber', 'JSArray', 'JSFunction',
-    'JSObject', 'JSRegExp', 'JSValue', 'Map', 'Oddball', 'Script',
-    'SeqAsciiString', 'SharedFunctionInfo'
-];
-
-
-#
-# The following structures store high-level representations of the structures
-# for which we're going to emit descriptive constants.
-#
-types = {};             # set of all type names
-typeclasses = {};       # maps type names to corresponding class names
-klasses = {};           # known classes, including parents
-fields = [];            # field declarations
-
-header = '''
-/*
- * This file is generated by %s.  Do not edit directly.
- */
-
-#include "v8.h"
-#include "frames.h"
-#include "frames-inl.h" /* for architecture-specific frame constants */
-
-using namespace v8::internal;
-
-extern "C" {
-
-/* stack frame constants */
-#define FRAME_CONST(value, klass)       \
-    int v8dbg_frametype_##klass = StackFrame::value;
-
-STACK_FRAME_TYPE_LIST(FRAME_CONST)
-
-#undef FRAME_CONST
-
-''' % sys.argv[0];
-
-footer = '''
-}
-'''
-
-#
-# Loads class hierarchy and type information from "objects.h".
-#
-def load_objects():
-        objfilename = sys.argv[2];
-        objfile = open(objfilename, 'r');
-        in_insttype = False;
-
-        typestr = '';
-
-        #
-        # Construct a dictionary for the classes we're sure should be present.
-        #
-        checktypes = {};
-        for klass in expected_classes:
-                checktypes[klass] = True;
-
-        #
-        # Iterate objects.h line-by-line to collect type and class information.
-        # For types, we accumulate a string representing the entire InstanceType
-        # enum definition and parse it later because it's easier to do so
-        # without the embedded newlines.
-        #
-        for line in objfile:
-                if (line.startswith('enum InstanceType {')):
-                        in_insttype = True;
-                        continue;
-
-                if (in_insttype and line.startswith('};')):
-                        in_insttype = False;
-                        continue;
-
-                line = re.sub('//.*', '', line.rstrip().lstrip());
-
-                if (in_insttype):
-                        typestr += line;
-                        continue;
-
-                match = re.match('class (\w[^\s:]*)(: public (\w[^\s{]*))?\s*{',
-                    line);
-
-                if (match):
-                        klass = match.group(1);
-                        pklass = match.group(3);
-                        klasses[klass] = { 'parent': pklass };
-
-        #
-        # Process the instance type declaration.
-        #
-        entries = typestr.split(',');
-        for entry in entries:
-                types[re.sub('\s*=.*', '', entry).lstrip()] = True;
-
-        #
-        # Infer class names for each type based on a systematic transformation.
-        # For example, "JS_FUNCTION_TYPE" becomes "JSFunction".  We find the
-        # class for each type rather than the other way around because there are
-        # fewer cases where one type maps to more than one class than the other
-        # way around.
-        #
-        for type in types:
-                #
-                # Symbols and Strings are implemented using the same classes.
-                #
-                usetype = re.sub('SYMBOL_', 'STRING_', type);
-
-                #
-                # REGEXP behaves like REG_EXP, as in JS_REGEXP_TYPE => JSRegExp.
-                #
-                usetype = re.sub('_REGEXP_', '_REG_EXP_', usetype);
-
-                #
-                # Remove the "_TYPE" suffix and then convert to camel case,
-                # except that a "JS" prefix remains uppercase (as in
-                # "JS_FUNCTION_TYPE" => "JSFunction").
-                #
-                if (not usetype.endswith('_TYPE')):
-                        continue;
-
-                usetype = usetype[0:len(usetype) - len('_TYPE')];
-                parts = usetype.split('_');
-                cctype = '';
-
-                if (parts[0] == 'JS'):
-                        cctype = 'JS';
-                        start = 1;
-                else:
-                        cctype = '';
-                        start = 0;
-
-                for ii in range(start, len(parts)):
-                        part = parts[ii];
-                        cctype += part[0].upper() + part[1:].lower();
-
-                #
-                # Mapping string types is more complicated.  Both types and
-                # class names for Strings specify a representation (e.g., Seq,
-                # Cons, External, or Sliced) and an encoding (TwoByte or Ascii),
-                # In the simplest case, both of these are explicit in both
-                # names, as in:
-                #
-                #       EXTERNAL_ASCII_STRING_TYPE => ExternalAsciiString
-                #
-                # However, either the representation or encoding can be omitted
-                # from the type name, in which case "Seq" and "TwoByte" are
-                # assumed, as in:
-                #
-                #       STRING_TYPE => SeqTwoByteString
-                #
-                # Additionally, sometimes the type name has more information
-                # than the class, as in:
-                #
-                #       CONS_ASCII_STRING_TYPE => ConsString
-                #
-                # To figure this out dynamically, we first check for a
-                # representation and encoding and add them if they're not
-                # present.  If that doesn't yield a valid class name, then we
-                # strip out the representation.
-                #
-                if (cctype.endswith('String')):
-                        if (cctype.find('Cons') == -1 and
-                            cctype.find('External') == -1 and
-                            cctype.find('Sliced') == -1):
-                                if (cctype.find('Ascii') != -1):
-                                        cctype = re.sub('AsciiString$',
-                                            'SeqAsciiString', cctype);
-                                else:
-                                        cctype = re.sub('String$',
-                                            'SeqString', cctype);
-
-                        if (cctype.find('Ascii') == -1):
-                                cctype = re.sub('String$', 'TwoByteString',
-                                    cctype);
-
-                        if (not (cctype in klasses)):
-                                cctype = re.sub('Ascii', '', cctype);
-                                cctype = re.sub('TwoByte', '', cctype);
-
-                #
-                # Despite all that, some types have no corresponding class.
-                #
-                if (cctype in klasses):
-                        typeclasses[type] = cctype;
-                        if (cctype in checktypes):
-                                del checktypes[cctype];
-
-        if (len(checktypes) > 0):
-                for klass in checktypes:
-                        print('error: expected class \"%s\" not found' % klass);
-
-                sys.exit(1);
-
-
-#
-# For a given macro call, pick apart the arguments and return an object
-# describing the corresponding output constant.  See load_fields().
-#
-def parse_field(call):
-        # Replace newlines with spaces.
-        for ii in range(0, len(call)):
-                if (call[ii] == '\n'):
-                        call[ii] == ' ';
-
-        idx = call.find('(');
-        kind = call[0:idx];
-        rest = call[idx + 1: len(call) - 1];
-        args = re.split('\s*,\s*', rest);
-
-        consts = [];
-
-        if (kind == 'ACCESSORS' or kind == 'ACCESSORS_GCSAFE'):
-                klass = args[0];
-                field = args[1];
-                dtype = args[2];
-                offset = args[3];
-
-                return ({
-                    'name': 'class_%s__%s__%s' % (klass, field, dtype),
-                    'value': '%s::%s' % (klass, offset)
-                });
-
-        assert(kind == 'SMI_ACCESSORS');
-        klass = args[0];
-        field = args[1];
-        offset = args[2];
-
-        return ({
-            'name': 'class_%s__%s__%s' % (klass, field, 'SMI'),
-            'value': '%s::%s' % (klass, offset)
-        });
-
-#
-# Load field offset information from objects-inl.h.
-#
-def load_fields():
-        inlfilename = sys.argv[3];
-        inlfile = open(inlfilename, 'r');
-
-        #
-        # Each class's fields and the corresponding offsets are described in the
-        # source by calls to macros like "ACCESSORS" (and friends).  All we do
-        # here is extract these macro invocations, taking into account that they
-        # may span multiple lines and may contain nested parentheses.  We also
-        # call parse_field() to pick apart the invocation.
-        #
-        prefixes = [ 'ACCESSORS', 'ACCESSORS_GCSAFE', 'SMI_ACCESSORS' ];
-        current = '';
-        opens = 0;
-
-        for line in inlfile:
-                if (opens > 0):
-                        # Continuation line
-                        for ii in range(0, len(line)):
-                                if (line[ii] == '('):
-                                        opens += 1;
-                                elif (line[ii] == ')'):
-                                        opens -= 1;
-
-                                if (opens == 0):
-                                        break;
-
-                        current += line[0:ii + 1];
-                        continue;
-
-                for prefix in prefixes:
-                        if (not line.startswith(prefix + '(')):
-                                continue;
-
-                        if (len(current) > 0):
-                                fields.append(parse_field(current));
-                                current = '';
-
-                        for ii in range(len(prefix), len(line)):
-                                if (line[ii] == '('):
-                                        opens += 1;
-                                elif (line[ii] == ')'):
-                                        opens -= 1;
-
-                                if (opens == 0):
-                                        break;
-
-                        current += line[0:ii + 1];
-
-        if (len(current) > 0):
-                fields.append(parse_field(current));
-                current = '';
-
-        for body in extras_accessors:
-                fields.append(parse_field('ACCESSORS(%s)' % body));
-
-#
-# Emit a block of constants.
-#
-def emit_set(out, consts):
-        for ii in range(0, len(consts)):
-                out.write('int v8dbg_%s = %s;\n' %
-                    (consts[ii]['name'], consts[ii]['value']));
-        out.write('\n');
-
-#
-# Emit the whole output file.
-#
-def emit_config():
-        out = file(sys.argv[1], 'w');
-
-        out.write(header);
-
-        out.write('/* miscellaneous constants */\n');
-        emit_set(out, consts_misc);
-
-        out.write('/* class type information */\n');
-        consts = [];
-        keys = typeclasses.keys();
-        keys.sort();
-        for typename in keys:
-                klass = typeclasses[typename];
-                consts.append({
-                    'name': 'type_%s__%s' % (klass, typename),
-                    'value': typename
-                });
-
-        emit_set(out, consts);
-
-        out.write('/* class hierarchy information */\n');
-        consts = [];
-        keys = klasses.keys();
-        keys.sort();
-        for klassname in keys:
-                pklass = klasses[klassname]['parent'];
-                if (pklass == None):
-                        continue;
-
-                consts.append({
-                    'name': 'parent_%s__%s' % (klassname, pklass),
-                    'value': 0
-                });
-
-        emit_set(out, consts);
-
-        out.write('/* field information */\n');
-        emit_set(out, fields);
-
-        out.write(footer);
-
-if (len(sys.argv) < 4):
-        print('usage: %s output.cc objects.h objects-inl.h' % sys.argv[0]);
-        sys.exit(2);
-
-load_objects();
-load_fields();
-emit_config();
diff --git a/tools/grokdump.py b/tools/grokdump.py
index 9977289..6bc49c6 100755
--- a/tools/grokdump.py
+++ b/tools/grokdump.py
@@ -52,7 +52,6 @@
   $ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp
 """
 
-
 DEBUG=False
 
 
@@ -234,80 +233,6 @@
                 MD_CONTEXT_X86_EXTENDED_REGISTERS))
 ])
 
-MD_CONTEXT_AMD64 = 0x00100000
-MD_CONTEXT_AMD64_CONTROL = (MD_CONTEXT_AMD64 | 0x00000001)
-MD_CONTEXT_AMD64_INTEGER = (MD_CONTEXT_AMD64 | 0x00000002)
-MD_CONTEXT_AMD64_SEGMENTS = (MD_CONTEXT_AMD64 | 0x00000004)
-MD_CONTEXT_AMD64_FLOATING_POINT = (MD_CONTEXT_AMD64 | 0x00000008)
-MD_CONTEXT_AMD64_DEBUG_REGISTERS = (MD_CONTEXT_AMD64 | 0x00000010)
-
-MINIDUMP_CONTEXT_AMD64 = Descriptor([
-  ("p1_home", ctypes.c_uint64),
-  ("p2_home", ctypes.c_uint64),
-  ("p3_home", ctypes.c_uint64),
-  ("p4_home", ctypes.c_uint64),
-  ("p5_home", ctypes.c_uint64),
-  ("p6_home", ctypes.c_uint64),
-  ("context_flags", ctypes.c_uint32),
-  ("mx_csr", ctypes.c_uint32),
-  # MD_CONTEXT_AMD64_CONTROL.
-  ("cs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)),
-  # MD_CONTEXT_AMD64_SEGMENTS
-  ("ds", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
-  ("es", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
-  ("fs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
-  ("gs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
-  # MD_CONTEXT_AMD64_CONTROL.
-  ("ss", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)),
-  ("eflags", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_AMD64_CONTROL)),
-  # MD_CONTEXT_AMD64_DEBUG_REGISTERS.
-  ("dr0", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("dr1", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("dr2", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("dr3", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("dr6", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("dr7", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  # MD_CONTEXT_AMD64_INTEGER.
-  ("rax", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("rcx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("rdx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("rbx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  # MD_CONTEXT_AMD64_CONTROL.
-  ("rsp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)),
-  # MD_CONTEXT_AMD64_INTEGER.
-  ("rbp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("rsi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("rdi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r8", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r9", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r10", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r11", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r12", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r13", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r14", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  ("r15", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
-  # MD_CONTEXT_AMD64_CONTROL.
-  ("rip", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)),
-  # MD_CONTEXT_AMD64_FLOATING_POINT
-  ("sse_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26),
-                                 MD_CONTEXT_AMD64_FLOATING_POINT)),
-  ("vector_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26),
-                                    MD_CONTEXT_AMD64_FLOATING_POINT)),
-  ("vector_control", EnableOnFlag(ctypes.c_uint64,
-                                  MD_CONTEXT_AMD64_FLOATING_POINT)),
-  # MD_CONTEXT_AMD64_DEBUG_REGISTERS.
-  ("debug_control", EnableOnFlag(ctypes.c_uint64,
-                                 MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("last_branch_to_rip", EnableOnFlag(ctypes.c_uint64,
-                                      MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("last_branch_from_rip", EnableOnFlag(ctypes.c_uint64,
-                                        MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("last_exception_to_rip", EnableOnFlag(ctypes.c_uint64,
-                                         MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
-  ("last_exception_from_rip", EnableOnFlag(ctypes.c_uint64,
-                                           MD_CONTEXT_AMD64_DEBUG_REGISTERS))
-])
-
 MINIDUMP_MEMORY_DESCRIPTOR = Descriptor([
   ("start", ctypes.c_uint64),
   ("memory", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
@@ -344,12 +269,6 @@
   ("threads", lambda t: MINIDUMP_THREAD.ctype * t.thread_count)
 ])
 
-MINIDUMP_RAW_SYSTEM_INFO = Descriptor([
-  ("processor_architecture", ctypes.c_uint16)
-])
-
-MD_CPU_ARCHITECTURE_X86 = 0
-MD_CPU_ARCHITECTURE_AMD64 = 9
 
 class MinidumpReader(object):
   """Minidump (.dmp) reader."""
@@ -369,34 +288,20 @@
     for _ in xrange(self.header.stream_count):
       directories.append(MINIDUMP_DIRECTORY.Read(self.minidump, offset))
       offset += MINIDUMP_DIRECTORY.size
-    self.arch = None
     self.exception = None
     self.exception_context = None
     self.memory_list = None
     self.memory_list64 = None
     self.thread_map = {}
-
-    # Find MDRawSystemInfo stream and determine arch.
-    for d in directories:
-      if d.stream_type == MD_SYSTEM_INFO_STREAM:
-        system_info = MINIDUMP_RAW_SYSTEM_INFO.Read(
-            self.minidump, d.location.rva)
-        self.arch = system_info.processor_architecture
-        assert self.arch in [MD_CPU_ARCHITECTURE_AMD64, MD_CPU_ARCHITECTURE_X86]
-    assert not self.arch is None
-
     for d in directories:
       DebugPrint(d)
+      # TODO(vitalyr): extract system info including CPU features.
       if d.stream_type == MD_EXCEPTION_STREAM:
         self.exception = MINIDUMP_EXCEPTION_STREAM.Read(
           self.minidump, d.location.rva)
         DebugPrint(self.exception)
-        if self.arch == MD_CPU_ARCHITECTURE_X86:
-          self.exception_context = MINIDUMP_CONTEXT_X86.Read(
-              self.minidump, self.exception.thread_context.rva)
-        elif self.arch == MD_CPU_ARCHITECTURE_AMD64:
-          self.exception_context = MINIDUMP_CONTEXT_AMD64.Read(
-              self.minidump, self.exception.thread_context.rva)
+        self.exception_context = MINIDUMP_CONTEXT_X86.Read(
+          self.minidump, self.exception.thread_context.rva)
         DebugPrint(self.exception_context)
       elif d.stream_type == MD_THREAD_LIST_STREAM:
         thread_list = MINIDUMP_THREAD_LIST.Read(self.minidump, d.location.rva)
@@ -430,16 +335,6 @@
     location = self.FindLocation(address)
     return ctypes.c_uint32.from_buffer(self.minidump, location).value
 
-  def ReadU64(self, address):
-    location = self.FindLocation(address)
-    return ctypes.c_uint64.from_buffer(self.minidump, location).value
-
-  def ReadUIntPtr(self, address):
-    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
-      return self.ReadU64(address)
-    elif self.arch == MD_CPU_ARCHITECTURE_X86:
-      return self.ReadU32(address)
-
   def ReadBytes(self, address, size):
     location = self.FindLocation(address)
     return self.minidump[location:location + size]
@@ -460,15 +355,10 @@
   def GetDisasmLines(self, address, size):
     location = self.FindLocation(address)
     if location is None: return []
-    arch = None
-    if self.arch == MD_CPU_ARCHITECTURE_X86:
-      arch = "ia32"
-    elif self.arch == MD_CPU_ARCHITECTURE_AMD64:
-      arch = "x64"
     return disasm.GetDisasmLines(self.minidump_name,
                                  location,
                                  size,
-                                 arch,
+                                 "ia32",
                                  False)
 
 
@@ -476,40 +366,13 @@
     self.minidump.close()
     self.minidump_file.close()
 
-  def ExceptionIP(self):
-    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
-      return self.exception_context.rip
-    elif self.arch == MD_CPU_ARCHITECTURE_X86:
-      return self.exception_context.eip
-
-  def ExceptionSP(self):
-    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
-      return self.exception_context.rsp
-    elif self.arch == MD_CPU_ARCHITECTURE_X86:
-      return self.exception_context.esp
-
-  def FormatIntPtr(self, value):
-    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
-      return "%016x" % value
-    elif self.arch == MD_CPU_ARCHITECTURE_X86:
-      return "%08x" % value
-
-  def PointerSize(self):
-    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
-      return 8
-    elif self.arch == MD_CPU_ARCHITECTURE_X86:
-      return 4
-
-  def Register(self, name):
-    return self.exception_context.__getattribute__(name)
-
 
 # List of V8 instance types. Obtained by adding the code below to any .cc file.
 #
-# #define DUMP_TYPE(T) printf("  %d: \"%s\",\n", T, #T);
+# #define DUMP_TYPE(T) printf("%d: \"%s\",\n", T, #T);
 # struct P {
 #   P() {
-#     printf("INSTANCE_TYPES = {\n");
+#     printf("{\n");
 #     INSTANCE_TYPE_LIST(DUMP_TYPE)
 #     printf("}\n");
 #   }
@@ -523,20 +386,13 @@
   66: "EXTERNAL_SYMBOL_TYPE",
   74: "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
   70: "EXTERNAL_ASCII_SYMBOL_TYPE",
-  82: "SHORT_EXTERNAL_SYMBOL_TYPE",
-  90: "SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
-  86: "SHORT_EXTERNAL_ASCII_SYMBOL_TYPE",
   0: "STRING_TYPE",
   4: "ASCII_STRING_TYPE",
   1: "CONS_STRING_TYPE",
   5: "CONS_ASCII_STRING_TYPE",
-  3: "SLICED_STRING_TYPE",
   2: "EXTERNAL_STRING_TYPE",
   10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
   6: "EXTERNAL_ASCII_STRING_TYPE",
-  18: "SHORT_EXTERNAL_STRING_TYPE",
-  26: "SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
-  22: "SHORT_EXTERNAL_ASCII_STRING_TYPE",
   6: "PRIVATE_EXTERNAL_ASCII_STRING_TYPE",
   128: "MAP_TYPE",
   129: "CODE_TYPE",
@@ -545,46 +401,43 @@
   132: "HEAP_NUMBER_TYPE",
   133: "FOREIGN_TYPE",
   134: "BYTE_ARRAY_TYPE",
-  135: "FREE_SPACE_TYPE",
-  136: "EXTERNAL_BYTE_ARRAY_TYPE",
-  137: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
-  138: "EXTERNAL_SHORT_ARRAY_TYPE",
-  139: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
-  140: "EXTERNAL_INT_ARRAY_TYPE",
-  141: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
-  142: "EXTERNAL_FLOAT_ARRAY_TYPE",
-  144: "EXTERNAL_PIXEL_ARRAY_TYPE",
-  146: "FILLER_TYPE",
-  147: "ACCESSOR_INFO_TYPE",
-  148: "ACCESSOR_PAIR_TYPE",
-  149: "ACCESS_CHECK_INFO_TYPE",
-  150: "INTERCEPTOR_INFO_TYPE",
-  151: "CALL_HANDLER_INFO_TYPE",
-  152: "FUNCTION_TEMPLATE_INFO_TYPE",
-  153: "OBJECT_TEMPLATE_INFO_TYPE",
-  154: "SIGNATURE_INFO_TYPE",
-  155: "TYPE_SWITCH_INFO_TYPE",
-  156: "SCRIPT_TYPE",
-  157: "CODE_CACHE_TYPE",
-  158: "POLYMORPHIC_CODE_CACHE_TYPE",
-  161: "FIXED_ARRAY_TYPE",
-  145: "FIXED_DOUBLE_ARRAY_TYPE",
-  162: "SHARED_FUNCTION_INFO_TYPE",
-  163: "JS_MESSAGE_OBJECT_TYPE",
-  166: "JS_VALUE_TYPE",
-  167: "JS_OBJECT_TYPE",
-  168: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
-  169: "JS_GLOBAL_OBJECT_TYPE",
-  170: "JS_BUILTINS_OBJECT_TYPE",
-  171: "JS_GLOBAL_PROXY_TYPE",
-  172: "JS_ARRAY_TYPE",
-  165: "JS_PROXY_TYPE",
-  175: "JS_WEAK_MAP_TYPE",
-  176: "JS_REGEXP_TYPE",
-  177: "JS_FUNCTION_TYPE",
-  164: "JS_FUNCTION_PROXY_TYPE",
-  159: "DEBUG_INFO_TYPE",
-  160: "BREAK_POINT_INFO_TYPE",
+  135: "EXTERNAL_BYTE_ARRAY_TYPE",
+  136: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
+  137: "EXTERNAL_SHORT_ARRAY_TYPE",
+  138: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
+  139: "EXTERNAL_INT_ARRAY_TYPE",
+  140: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
+  141: "EXTERNAL_FLOAT_ARRAY_TYPE",
+  143: "EXTERNAL_PIXEL_ARRAY_TYPE",
+  145: "FILLER_TYPE",
+  146: "ACCESSOR_INFO_TYPE",
+  147: "ACCESS_CHECK_INFO_TYPE",
+  148: "INTERCEPTOR_INFO_TYPE",
+  149: "CALL_HANDLER_INFO_TYPE",
+  150: "FUNCTION_TEMPLATE_INFO_TYPE",
+  151: "OBJECT_TEMPLATE_INFO_TYPE",
+  152: "SIGNATURE_INFO_TYPE",
+  153: "TYPE_SWITCH_INFO_TYPE",
+  154: "SCRIPT_TYPE",
+  155: "CODE_CACHE_TYPE",
+  156: "POLYMORPHIC_CODE_CACHE_TYPE",
+  159: "FIXED_ARRAY_TYPE",
+  160: "SHARED_FUNCTION_INFO_TYPE",
+  161: "JS_MESSAGE_OBJECT_TYPE",
+  162: "JS_VALUE_TYPE",
+  163: "JS_OBJECT_TYPE",
+  164: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
+  165: "JS_GLOBAL_OBJECT_TYPE",
+  166: "JS_BUILTINS_OBJECT_TYPE",
+  167: "JS_GLOBAL_PROXY_TYPE",
+  168: "JS_ARRAY_TYPE",
+  169: "JS_PROXY_TYPE",
+  170: "JS_WEAK_MAP_TYPE",
+  171: "JS_REGEXP_TYPE",
+  172: "JS_FUNCTION_TYPE",
+  173: "JS_FUNCTION_PROXY_TYPE",
+  157: "DEBUG_INFO_TYPE",
+  158: "BREAK_POINT_INFO_TYPE",
 }
 
 
@@ -648,36 +501,34 @@
     p.Print(str(self))
 
   def __str__(self):
-    return "HeapObject(%s, %s)" % (self.heap.reader.FormatIntPtr(self.address),
-                                   INSTANCE_TYPES[self.map.instance_type])
+    return "HeapObject(%08x, %s)" % (self.address,
+                                     INSTANCE_TYPES[self.map.instance_type])
 
   def ObjectField(self, offset):
-    field_value = self.heap.reader.ReadUIntPtr(self.address + offset)
+    field_value = self.heap.reader.ReadU32(self.address + offset)
     return self.heap.FindObjectOrSmi(field_value)
 
   def SmiField(self, offset):
-    field_value = self.heap.reader.ReadUIntPtr(self.address + offset)
+    field_value = self.heap.reader.ReadU32(self.address + offset)
     assert (field_value & 1) == 0
     return field_value / 2
 
 
 class Map(HeapObject):
-  def InstanceTypeOffset(self):
-    return self.heap.PointerSize() + self.heap.IntSize()
+  INSTANCE_TYPE_OFFSET = 8
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
     self.instance_type = \
-        heap.reader.ReadU8(self.address + self.InstanceTypeOffset())
+        heap.reader.ReadU8(self.address + Map.INSTANCE_TYPE_OFFSET)
 
 
 class String(HeapObject):
-  def LengthOffset(self):
-    return self.heap.PointerSize()
+  LENGTH_OFFSET = 4
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
-    self.length = self.SmiField(self.LengthOffset())
+    self.length = self.SmiField(String.LENGTH_OFFSET)
 
   def GetChars(self):
     return "?string?"
@@ -690,12 +541,11 @@
 
 
 class SeqString(String):
-  def CharsOffset(self):
-    return self.heap.PointerSize() * 3
+  CHARS_OFFSET = 12
 
   def __init__(self, heap, map, address):
     String.__init__(self, heap, map, address)
-    self.chars = heap.reader.ReadBytes(self.address + self.CharsOffset(),
+    self.chars = heap.reader.ReadBytes(self.address + SeqString.CHARS_OFFSET,
                                        self.length)
 
   def GetChars(self):
@@ -703,7 +553,6 @@
 
 
 class ExternalString(String):
-  # TODO(vegorov) fix ExternalString for X64 architecture
   RESOURCE_OFFSET = 12
 
   WEBKIT_RESOUCE_STRING_IMPL_OFFSET = 4
@@ -733,28 +582,24 @@
 
 
 class ConsString(String):
-  def LeftOffset(self):
-    return self.heap.PointerSize() * 3
-
-  def RightOffset(self):
-    return self.heap.PointerSize() * 4
+  LEFT_OFFSET = 12
+  RIGHT_OFFSET = 16
 
   def __init__(self, heap, map, address):
     String.__init__(self, heap, map, address)
-    self.left = self.ObjectField(self.LeftOffset())
-    self.right = self.ObjectField(self.RightOffset())
+    self.left = self.ObjectField(ConsString.LEFT_OFFSET)
+    self.right = self.ObjectField(ConsString.RIGHT_OFFSET)
 
   def GetChars(self):
     return self.left.GetChars() + self.right.GetChars()
 
 
 class Oddball(HeapObject):
-  def ToStringOffset(self):
-    return self.heap.PointerSize()
+  TO_STRING_OFFSET = 4
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
-    self.to_string = self.ObjectField(self.ToStringOffset())
+    self.to_string = self.ObjectField(Oddball.TO_STRING_OFFSET)
 
   def Print(self, p):
     p.Print(str(self))
@@ -764,23 +609,19 @@
 
 
 class FixedArray(HeapObject):
-  def LengthOffset(self):
-    return self.heap.PointerSize()
-
-  def ElementsOffset(self):
-    return self.heap.PointerSize() * 2
+  LENGTH_OFFSET = 4
+  ELEMENTS_OFFSET = 8
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
-    self.length = self.SmiField(self.LengthOffset())
+    self.length = self.SmiField(FixedArray.LENGTH_OFFSET)
 
   def Print(self, p):
-    p.Print("FixedArray(%s) {" % self.heap.reader.FormatIntPtr(self.address))
+    p.Print("FixedArray(%08x) {" % self.address)
     p.Indent()
     p.Print("length: %d" % self.length)
-    base_offset = self.ElementsOffset()
     for i in xrange(self.length):
-      offset = base_offset + 4 * i
+      offset = FixedArray.ELEMENTS_OFFSET + 4 * i
       p.Print("[%08d] = %s" % (i, self.ObjectField(offset)))
     p.Dedent()
     p.Print("}")
@@ -790,22 +631,19 @@
 
 
 class JSFunction(HeapObject):
-  def CodeEntryOffset(self):
-    return 3 * self.heap.PointerSize()
-
-  def SharedOffset(self):
-    return 5 * self.heap.PointerSize()
+  CODE_ENTRY_OFFSET = 12
+  SHARED_OFFSET = 20
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
     code_entry = \
-        heap.reader.ReadU32(self.address + self.CodeEntryOffset())
-    self.code = heap.FindObject(code_entry - Code.HeaderSize(heap) + 1)
-    self.shared = self.ObjectField(self.SharedOffset())
+        heap.reader.ReadU32(self.address + JSFunction.CODE_ENTRY_OFFSET)
+    self.code = heap.FindObject(code_entry - Code.ENTRY_OFFSET + 1)
+    self.shared = self.ObjectField(JSFunction.SHARED_OFFSET)
 
   def Print(self, p):
     source = "\n".join("  %s" % line for line in self._GetSource().split("\n"))
-    p.Print("JSFunction(%s) {" % self.heap.reader.FormatIntPtr(self.address))
+    p.Print("JSFunction(%08x) {" % self.address)
     p.Indent()
     p.Print("inferred name: %s" % self.shared.inferred_name)
     if self.shared.script.Is(Script) and self.shared.script.name.Is(String):
@@ -824,8 +662,7 @@
     inferred_name = ""
     if self.shared.Is(SharedFunctionInfo):
       inferred_name = self.shared.inferred_name
-    return "JSFunction(%s, %s)" % \
-          (self.heap.reader.FormatIntPtr(self.address), inferred_name)
+    return "JSFunction(%08x, %s)" % (self.address, inferred_name)
 
   def _GetSource(self):
     source = "?source?"
@@ -838,75 +675,47 @@
 
 
 class SharedFunctionInfo(HeapObject):
-  def CodeOffset(self):
-    return 2 * self.heap.PointerSize()
-
-  def ScriptOffset(self):
-    return 7 * self.heap.PointerSize()
-
-  def InferredNameOffset(self):
-    return 9 * self.heap.PointerSize()
-
-  def EndPositionOffset(self):
-    return 12 * self.heap.PointerSize() + 4 * self.heap.IntSize()
-
-  def StartPositionAndTypeOffset(self):
-    return 12 * self.heap.PointerSize() + 5 * self.heap.IntSize()
+  CODE_OFFSET = 2 * 4
+  SCRIPT_OFFSET = 7 * 4
+  INFERRED_NAME_OFFSET = 9 * 4
+  START_POSITION_AND_TYPE_OFFSET = 17 * 4
+  END_POSITION_OFFSET = 18 * 4
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
-    self.code = self.ObjectField(self.CodeOffset())
-    self.script = self.ObjectField(self.ScriptOffset())
-    self.inferred_name = self.ObjectField(self.InferredNameOffset())
-    if heap.PointerSize() == 8:
-      start_position_and_type = \
-          heap.reader.ReadU32(self.StartPositionAndTypeOffset())
-      self.start_position = start_position_and_type >> 2
-      pseudo_smi_end_position = \
-          heap.reader.ReadU32(self.EndPositionOffset())
-      self.end_position = pseudo_smi_end_position >> 2
-    else:
-      start_position_and_type = \
-          self.SmiField(self.StartPositionAndTypeOffset())
-      self.start_position = start_position_and_type >> 2
-      self.end_position = \
-          self.SmiField(self.EndPositionOffset())
+    self.code = self.ObjectField(SharedFunctionInfo.CODE_OFFSET)
+    self.script = self.ObjectField(SharedFunctionInfo.SCRIPT_OFFSET)
+    self.inferred_name = \
+        self.ObjectField(SharedFunctionInfo.INFERRED_NAME_OFFSET)
+    start_position_and_type = \
+        self.SmiField(SharedFunctionInfo.START_POSITION_AND_TYPE_OFFSET)
+    self.start_position = start_position_and_type >> 2
+    self.end_position = self.SmiField(SharedFunctionInfo.END_POSITION_OFFSET)
 
 
 class Script(HeapObject):
-  def SourceOffset(self):
-    return self.heap.PointerSize()
-
-  def NameOffset(self):
-    return self.SourceOffset() + self.heap.PointerSize()
+  SOURCE_OFFSET = 4
+  NAME_OFFSET = 8
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
-    self.source = self.ObjectField(self.SourceOffset())
-    self.name = self.ObjectField(self.NameOffset())
+    self.source = self.ObjectField(Script.SOURCE_OFFSET)
+    self.name = self.ObjectField(Script.NAME_OFFSET)
 
 
 class Code(HeapObject):
-  CODE_ALIGNMENT_MASK = (1 << 5) - 1
-
-  def InstructionSizeOffset(self):
-    return self.heap.PointerSize()
-
-  @staticmethod
-  def HeaderSize(heap):
-    return (heap.PointerSize() + heap.IntSize() + \
-        4 * heap.PointerSize() + 3 * heap.IntSize() + \
-        Code.CODE_ALIGNMENT_MASK) & ~Code.CODE_ALIGNMENT_MASK
+  INSTRUCTION_SIZE_OFFSET = 4
+  ENTRY_OFFSET = 32
 
   def __init__(self, heap, map, address):
     HeapObject.__init__(self, heap, map, address)
-    self.entry = self.address + Code.HeaderSize(heap)
+    self.entry = self.address + Code.ENTRY_OFFSET
     self.instruction_size = \
-        heap.reader.ReadU32(self.address + self.InstructionSizeOffset())
+        heap.reader.ReadU32(self.address + Code.INSTRUCTION_SIZE_OFFSET)
 
   def Print(self, p):
     lines = self.heap.reader.GetDisasmLines(self.entry, self.instruction_size)
-    p.Print("Code(%s) {" % self.heap.reader.FormatIntPtr(self.address))
+    p.Print("Code(%08x) {" % self.address)
     p.Indent()
     p.Print("instruction_size: %d" % self.instruction_size)
     p.PrintLines(self._FormatLine(line) for line in lines)
@@ -926,9 +735,6 @@
     "EXTERNAL_SYMBOL_TYPE": ExternalString,
     "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE": ExternalString,
     "EXTERNAL_ASCII_SYMBOL_TYPE": ExternalString,
-    "SHORT_EXTERNAL_SYMBOL_TYPE": ExternalString,
-    "SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE": ExternalString,
-    "SHORT_EXTERNAL_ASCII_SYMBOL_TYPE": ExternalString,
     "STRING_TYPE": SeqString,
     "ASCII_STRING_TYPE": SeqString,
     "CONS_STRING_TYPE": ConsString,
@@ -958,10 +764,10 @@
   def FindObject(self, tagged_address):
     if tagged_address in self.objects:
       return self.objects[tagged_address]
-    if (tagged_address & self.ObjectAlignmentMask()) != 1: return None
+    if (tagged_address & 1) != 1: return None
     address = tagged_address - 1
     if not self.reader.IsValidAddress(address): return None
-    map_tagged_address = self.reader.ReadUIntPtr(address)
+    map_tagged_address = self.reader.ReadU32(address)
     if tagged_address == map_tagged_address:
       # Meta map?
       meta_map = Map(self, None, address)
@@ -970,7 +776,7 @@
       meta_map.map = meta_map
       object = meta_map
     else:
-      map = self.FindMap(map_tagged_address)
+      map = self.FindObject(map_tagged_address)
       if map is None: return None
       instance_type_name = INSTANCE_TYPES.get(map.instance_type)
       if instance_type_name is None: return None
@@ -979,37 +785,9 @@
     self.objects[tagged_address] = object
     return object
 
-  def FindMap(self, tagged_address):
-    if (tagged_address & self.MapAlignmentMask()) != 1: return None
-    address = tagged_address - 1
-    if not self.reader.IsValidAddress(address): return None
-    object = Map(self, None, address)
-    return object
-
-  def IntSize(self):
-    return 4
-
-  def PointerSize(self):
-    return self.reader.PointerSize()
-
-  def ObjectAlignmentMask(self):
-    return self.PointerSize() - 1
-
-  def MapAlignmentMask(self):
-    if self.reader.arch == MD_CPU_ARCHITECTURE_AMD64:
-      return (1 << 4) - 1
-    elif self.reader.arch == MD_CPU_ARCHITECTURE_X86:
-      return (1 << 5) - 1
-
 
 EIP_PROXIMITY = 64
 
-CONTEXT_FOR_ARCH = {
-    MD_CPU_ARCHITECTURE_AMD64:
-      ['rax', 'rbx', 'rcx', 'rdx', 'rdi', 'rsi', 'rbp', 'rsp', 'rip'],
-    MD_CPU_ARCHITECTURE_X86:
-      ['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip']
-}
 
 def AnalyzeMinidump(options, minidump_name):
   reader = MinidumpReader(options, minidump_name)
@@ -1022,35 +800,40 @@
   print "  thread id: %d" % exception_thread.id
   print "  code: %08X" % reader.exception.exception.code
   print "  context:"
-  for r in CONTEXT_FOR_ARCH[reader.arch]:
-    print "    %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
+  print "    eax: %08x" % reader.exception_context.eax
+  print "    ebx: %08x" % reader.exception_context.ebx
+  print "    ecx: %08x" % reader.exception_context.ecx
+  print "    edx: %08x" % reader.exception_context.edx
+  print "    edi: %08x" % reader.exception_context.edi
+  print "    esi: %08x" % reader.exception_context.esi
+  print "    ebp: %08x" % reader.exception_context.ebp
+  print "    esp: %08x" % reader.exception_context.esp
+  print "    eip: %08x" % reader.exception_context.eip
   # TODO(vitalyr): decode eflags.
   print "    eflags: %s" % bin(reader.exception_context.eflags)[2:]
   print
 
-  stack_top = reader.ExceptionSP()
   stack_bottom = exception_thread.stack.start + \
       exception_thread.stack.memory.data_size
-  stack_map = {reader.ExceptionIP(): -1}
-  for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
-    maybe_address = reader.ReadUIntPtr(slot)
+  stack_map = {reader.exception_context.eip: -1}
+  for slot in xrange(reader.exception_context.esp, stack_bottom, 4):
+    maybe_address = reader.ReadU32(slot)
     if not maybe_address in stack_map:
       stack_map[maybe_address] = slot
   heap = V8Heap(reader, stack_map)
 
   print "Disassembly around exception.eip:"
-  start = reader.ExceptionIP() - EIP_PROXIMITY
+  start = reader.exception_context.eip - EIP_PROXIMITY
   lines = reader.GetDisasmLines(start, 2 * EIP_PROXIMITY)
   for line in lines:
     print FormatDisasmLine(start, heap, line)
   print
 
   print "Annotated stack (from exception.esp to bottom):"
-  for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
-    maybe_address = reader.ReadUIntPtr(slot)
+  for slot in xrange(reader.exception_context.esp, stack_bottom, 4):
+    maybe_address = reader.ReadU32(slot)
     heap_object = heap.FindObject(maybe_address)
-    print "%s: %s" % (reader.FormatIntPtr(slot),
-                      reader.FormatIntPtr(maybe_address))
+    print "%08x: %08x" % (slot, maybe_address)
     if heap_object:
       heap_object.Print(Printer())
       print
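The hunk above reverts the minidump heap walker from pointer-size-parametric field offsets (and the separate FindMap path) to hard-coded ia32 offsets and plain 32-bit reads. A minimal sketch, assuming the standard 32-bit V8 tagging scheme (heap object pointers carry a 1 in the low bit, Smis keep their payload above a 0 tag bit), of why a low-bit check and fixed 4-byte offsets suffice there; the names below are illustrative, not from the patch.

# Minimal sketch (not part of the patch) of the 32-bit tagging the reverted
# reader code assumes.
HEAP_OBJECT_TAG = 1
SMI_TAG_SIZE = 1

def is_tagged_heap_object(tagged_address):
  # Mirrors the reverted check: (tagged_address & 1) != 1 means "not an object".
  return (tagged_address & 1) == HEAP_OBJECT_TAG

def field_address(tagged_address, offset):
  # Untag (subtract 1) and add a hard-coded ia32 offset such as
  # ConsString.LEFT_OFFSET = 12 or FixedArray.LENGTH_OFFSET = 4.
  return (tagged_address - HEAP_OBJECT_TAG) + offset

def smi_value(raw_field):
  # A small integer stores its 31-bit value above the tag bit on ia32.
  return raw_field >> SMI_TAG_SIZE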
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 764789a..5014417 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -1,4 +1,4 @@
-# Copyright 2012 the V8 project authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
@@ -32,7 +32,6 @@
       'targets': [
         {
           'target_name': 'v8',
-          'dependencies_traverse': 1,
           'conditions': [
             ['want_separate_host_toolset==1', {
               'toolsets': ['host', 'target'],
@@ -40,16 +39,10 @@
               'toolsets': ['target'],
             }],
             ['v8_use_snapshot=="true"', {
-              # The dependency on v8_base should come from a transitive
-              # dependency however the Android toolchain requires libv8_base.a
-              # to appear before libv8_snapshot.a so it's listed explicitly.
-              'dependencies': ['v8_base', 'v8_snapshot'],
+              'dependencies': ['v8_snapshot'],
             },
             {
-              # The dependency on v8_base should come from a transitive
-              # dependency however the Android toolchain requires libv8_base.a
-              # to appear before libv8_snapshot.a so it's listed explicitly.
-              'dependencies': ['v8_base', 'v8_nosnapshot'],
+              'dependencies': ['v8_nosnapshot'],
             }],
             ['component=="shared_library"', {
               'type': '<(component)',
@@ -79,7 +72,11 @@
                   },
                 }],
                 ['soname_version!=""', {
-                  'product_extension': 'so.<(soname_version)',
+                  # Ideally, we'd like to specify the full filename for the
+                  # library and set it to "libv8.so.<(soname_version)",
+                  # but currently the best we can do is use 'product_name' and
+                  # get "libv8-<(soname_version).so".
+                  'product_name': 'v8-<(soname_version)',
                 }],
               ],
             },
@@ -228,9 +225,6 @@
         {
           'target_name': 'v8_base',
           'type': '<(library)',
-          'variables': {
-            'optimize': 'max',
-          },
           'include_dirs+': [
             '../../src',
           ],
@@ -246,8 +240,8 @@
             '../../src/assembler.cc',
             '../../src/assembler.h',
             '../../src/ast.cc',
+            '../../src/ast-inl.h',
             '../../src/ast.h',
-            '../../src/atomicops.h',
             '../../src/atomicops_internals_x86_gcc.cc',
             '../../src/bignum.cc',
             '../../src/bignum.h',
@@ -289,8 +283,6 @@
             '../../src/cpu-profiler.h',
             '../../src/data-flow.cc',
             '../../src/data-flow.h',
-            '../../src/date.cc',
-            '../../src/date.h',
             '../../src/dateparser.cc',
             '../../src/dateparser.h',
             '../../src/dateparser-inl.h',
@@ -334,6 +326,7 @@
             '../../src/handles-inl.h',
             '../../src/handles.cc',
             '../../src/handles.h',
+            '../../src/hashmap.cc',
             '../../src/hashmap.h',
             '../../src/heap-inl.h',
             '../../src/heap.cc',
@@ -347,12 +340,8 @@
             '../../src/ic-inl.h',
             '../../src/ic.cc',
             '../../src/ic.h',
-            '../../src/incremental-marking.cc',
-            '../../src/incremental-marking.h',
             '../../src/inspector.cc',
             '../../src/inspector.h',
-            '../../src/interface.cc',
-            '../../src/interface.h',
             '../../src/interpreter-irregexp.cc',
             '../../src/interpreter-irregexp.h',
             '../../src/json-parser.h',
@@ -360,7 +349,6 @@
             '../../src/jsregexp.h',
             '../../src/isolate.cc',
             '../../src/isolate.h',
-            '../../src/lazy-instance.h'
             '../../src/list-inl.h',
             '../../src/list.h',
             '../../src/lithium.cc',
@@ -391,8 +379,6 @@
             '../../src/objects-visiting.h',
             '../../src/objects.cc',
             '../../src/objects.h',
-            '../../src/once.cc',
-            '../../src/once.h',
             '../../src/parser.cc',
             '../../src/parser.h',
             '../../src/platform-tls-mac.h',
@@ -408,7 +394,6 @@
             '../../src/prettyprinter.h',
             '../../src/property.cc',
             '../../src/property.h',
-            '../../src/property-details.h',
             '../../src/profile-generator-inl.h',
             '../../src/profile-generator.cc',
             '../../src/profile-generator.h',
@@ -446,9 +431,6 @@
             '../../src/spaces-inl.h',
             '../../src/spaces.cc',
             '../../src/spaces.h',
-            '../../src/store-buffer-inl.h',
-            '../../src/store-buffer.cc',
-            '../../src/store-buffer.h',
             '../../src/string-search.cc',
             '../../src/string-search.h',
             '../../src/string-stream.cc',
@@ -567,40 +549,6 @@
                 '../../src/ia32/stub-cache-ia32.cc',
               ],
             }],
-            ['v8_target_arch=="mips"', {
-              'sources': [
-                '../../src/mips/assembler-mips.cc',
-                '../../src/mips/assembler-mips.h',
-                '../../src/mips/assembler-mips-inl.h',
-                '../../src/mips/builtins-mips.cc',
-                '../../src/mips/codegen-mips.cc',
-                '../../src/mips/codegen-mips.h',
-                '../../src/mips/code-stubs-mips.cc',
-                '../../src/mips/code-stubs-mips.h',
-                '../../src/mips/constants-mips.cc',
-                '../../src/mips/constants-mips.h',
-                '../../src/mips/cpu-mips.cc',
-                '../../src/mips/debug-mips.cc',
-                '../../src/mips/deoptimizer-mips.cc',
-                '../../src/mips/disasm-mips.cc',
-                '../../src/mips/frames-mips.cc',
-                '../../src/mips/frames-mips.h',
-                '../../src/mips/full-codegen-mips.cc',
-                '../../src/mips/ic-mips.cc',
-                '../../src/mips/lithium-codegen-mips.cc',
-                '../../src/mips/lithium-codegen-mips.h',
-                '../../src/mips/lithium-gap-resolver-mips.cc',
-                '../../src/mips/lithium-gap-resolver-mips.h',
-                '../../src/mips/lithium-mips.cc',
-                '../../src/mips/lithium-mips.h',
-                '../../src/mips/macro-assembler-mips.cc',
-                '../../src/mips/macro-assembler-mips.h',
-                '../../src/mips/regexp-macro-assembler-mips.cc',
-                '../../src/mips/regexp-macro-assembler-mips.h',
-                '../../src/mips/simulator-mips.cc',
-                '../../src/mips/stub-cache-mips.cc',
-              ],
-            }],
             ['v8_target_arch=="x64" or v8_target_arch=="mac" or OS=="mac"', {
               'sources': [
                 '../../src/x64/assembler-x64-inl.h',
@@ -638,8 +586,7 @@
                     ['v8_compress_startup_data=="bz2"', {
                       'libraries': [
                         '-lbz2',
-                      ]
-                    }],
+                    ]}],
                   ],
                 },
                 'sources': [
@@ -649,30 +596,26 @@
               }
             ],
             ['OS=="android"', {
-                'defines': [
-                  'CAN_USE_VFP_INSTRUCTIONS',
-                ],
                 'sources': [
                   '../../src/platform-posix.cc',
                 ],
                 'conditions': [
-                  ['host_os=="mac"', {
-                    'target_conditions': [
-                      ['_toolset=="host"', {
-                        'sources': [
-                          '../../src/platform-macos.cc'
-                        ]
-                      }, {
-                        'sources': [
-                          '../../src/platform-linux.cc'
-                        ]
-                      }],
-                    ],
+                  ['host_os=="mac" and _toolset!="target"', {
+                    'sources': [
+                      '../../src/platform-macos.cc'
+                    ]
                   }, {
                     'sources': [
                       '../../src/platform-linux.cc'
                     ]
                   }],
+                  ['_toolset=="target"', {
+                    'link_settings': {
+                      'libraries': [
+                        '-llog',
+                       ],
+                     }
+                  }],
                 ],
               },
             ],
@@ -698,28 +641,6 @@
                 ],
               }
             ],
-            ['OS=="netbsd"', {
-                'link_settings': {
-                  'libraries': [
-                    '-L/usr/pkg/lib -Wl,-R/usr/pkg/lib -lexecinfo',
-                ]},
-                'sources': [
-                  '../../src/platform-openbsd.cc',
-                  '../../src/platform-posix.cc'
-                ],
-              }
-            ],
-            ['OS=="solaris"', {
-                'link_settings': {
-                  'libraries': [
-                    '-lsocket -lnsl',
-                ]},
-                'sources': [
-                  '../../src/platform-solaris.cc',
-                  '../../src/platform-posix.cc',
-                ],
-              }
-            ],
             ['OS=="mac"', {
               'sources': [
                 '../../src/platform-macos.cc',
@@ -743,11 +664,6 @@
                 'V8_SHARED',
               ],
             }],
-            ['v8_postmortem_support=="true"', {
-              'sources': [
-                '<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
-              ]
-            }],
           ],
         },
         {
@@ -781,7 +697,7 @@
             'experimental_library_files': [
               '../../src/macros.py',
               '../../src/proxy.js',
-              '../../src/collection.js',
+              '../../src/weakmap.js',
             ],
           },
           'actions': [
@@ -824,38 +740,9 @@
           ],
         },
         {
-          'target_name': 'postmortem-metadata',
-          'type': 'none',
-          'variables': {
-            'heapobject_files': [
-                '../../src/objects.h',
-                '../../src/objects-inl.h',
-            ],
-          },
-          'actions': [
-              {
-                'action_name': 'gen-postmortem-metadata',
-                'inputs': [
-                  '../../tools/gen-postmortem-metadata.py',
-                  '<@(heapobject_files)',
-                ],
-                'outputs': [
-                  '<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
-                ],
-                'action': [
-                  'python',
-                  '../../tools/gen-postmortem-metadata.py',
-                  '<@(_outputs)',
-                  '<@(heapobject_files)'
-                ]
-              }
-           ]
-        },
-        {
           'target_name': 'mksnapshot',
           'type': 'executable',
           'dependencies': [
-            'v8_base',
             'v8_nosnapshot',
           ],
           'include_dirs+': [
@@ -873,8 +760,8 @@
             ['v8_compress_startup_data=="bz2"', {
               'libraries': [
                 '-lbz2',
-              ]
-            }],
+              ]}
+            ],
           ],
         },
         {
@@ -899,8 +786,7 @@
             ['v8_compress_startup_data=="bz2"', {
               'libraries': [
                 '-lbz2',
-              ]
-            }],
+              ]}],
           ],
         },
         {
@@ -914,8 +800,6 @@
             '../../include/v8stdint.h',
             '../../src/allocation.cc',
             '../../src/allocation.h',
-            '../../src/atomicops.h',
-            '../../src/atomicops_internals_x86_gcc.cc',
             '../../src/bignum.cc',
             '../../src/bignum.h',
             '../../src/bignum-dtoa.cc',
@@ -938,11 +822,10 @@
             '../../src/fixed-dtoa.cc',
             '../../src/fixed-dtoa.h',
             '../../src/globals.h',
+            '../../src/hashmap.cc',
             '../../src/hashmap.h',
             '../../src/list-inl.h',
             '../../src/list.h',
-            '../../src/once.cc',
-            '../../src/once.h',
             '../../src/preparse-data-format.h',
             '../../src/preparse-data.cc',
             '../../src/preparse-data.h',
@@ -975,7 +858,7 @@
       'targets': [
         {
           'target_name': 'v8',
-          'type': 'none',
+          'type': 'settings',
           'conditions': [
             ['want_separate_host_toolset==1', {
               'toolsets': ['host', 'target'],
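The v8.gyp changes above are easier to follow if you keep in mind that a .gyp file is a Python literal dictionary and each condition is a [condition, then-dict, else-dict] triple, as in the v8_use_snapshot block being reverted. A hypothetical, trimmed-down sketch (not the real file) that can be loaded directly from Python:

import ast

# Hypothetical, heavily trimmed .gyp content; the real v8.gyp is much larger.
gyp_text = """
{
  'targets': [{
    'target_name': 'v8',
    'conditions': [
      ['v8_use_snapshot=="true"',
        {'dependencies': ['v8_snapshot']},
        {'dependencies': ['v8_nosnapshot']}],
    ],
  }],
}
"""

spec = ast.literal_eval(gyp_text.strip())
condition, if_true, if_false = spec['targets'][0]['conditions'][0]
# condition == 'v8_use_snapshot=="true"'; gyp picks if_true or if_false.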
diff --git a/tools/js2c.py b/tools/js2c.py
index fa559f3..a2ea8ea 100644
--- a/tools/js2c.py
+++ b/tools/js2c.py
@@ -128,13 +128,12 @@
       end = pattern_match.end()
       assert lines[end - 1] == '('
       last_match = end
-      arg_index = [0]  # Wrap state into array, to work around Python "scoping"
+      arg_index = 0
       mapping = { }
       def add_arg(str):
         # Remember to expand recursively in the arguments
         replacement = ExpandMacros(str.strip(), macros)
-        mapping[macro.args[arg_index[0]]] = replacement
-        arg_index[0] += 1
+        mapping[macro.args[arg_index]] = replacement
       while end < len(lines) and height > 0:
         # We don't count commas at higher nesting levels.
         if lines[end] == ',' and height == 1:
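The lines removed from js2c.py above used the usual Python 2 workaround for letting a nested function update its enclosing function's counter: keep the counter in a one-element list, since nonlocal only exists in Python 3. A small sketch of that pattern with illustrative names:

def expand_args(names, values):
  arg_index = [0]   # one-element list: a mutable cell the closure can update
  mapping = {}

  def add_arg(value):
    # A bare integer could only be read here; assigning to it would create a
    # new local (Python 2 has no 'nonlocal'), so the index lives in the cell.
    mapping[names[arg_index[0]]] = value
    arg_index[0] += 1

  for value in values:
    add_arg(value)
  return mapping

# expand_args(["A", "B"], ["x + 1", "y * 2"]) -> {"A": "x + 1", "B": "y * 2"}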
diff --git a/tools/jsmin.py b/tools/jsmin.py
index e82f3d0..646bf14 100644
--- a/tools/jsmin.py
+++ b/tools/jsmin.py
@@ -232,9 +232,7 @@
       # A regexp that matches a regexp literal surrounded by /slashes/.
       # Don't allow a regexp to have a ) before the first ( since that's a
       # syntax error and it's probably just two unrelated slashes.
-      # Also don't allow it to come after anything that can only be the
-      # end of a primary expression.
-      slash_quoted_regexp = r"(?<![\w$'\")\]])/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
+      slash_quoted_regexp = r"/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
       # Replace multiple spaces with a single space.
       line = re.sub("|".join([double_quoted_string,
                               single_quoted_string,
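The lookbehind dropped from slash_quoted_regexp above is what keeps the minifier from mistaking a division operator for the start of a regexp literal when the slash follows something that can only end a primary expression (an identifier, number, closing bracket, or string). A small sketch of the difference, using a made-up input line:

import re

with_guard    = r"(?<![\w$'\")\]])/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
without_guard = r"/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"

line = "total = hits/misses/2;"   # plain division, no regexp literal

assert re.search(with_guard, line) is None                    # '/' follows 's'
assert re.search(without_guard, line).group() == "/misses/"   # misparsed as regexp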
diff --git a/tools/linux-tick-processor b/tools/linux-tick-processor
index 7070ce6..0b0a1fb 100755
--- a/tools/linux-tick-processor
+++ b/tools/linux-tick-processor
@@ -1,14 +1,5 @@
 #!/bin/sh
 
-# find the name of the log file to process, it must not start with a dash.
-log_file="v8.log"
-for arg in "$@"
-do
-  if ! expr "X${arg}" : "^X-" > /dev/null; then
-    log_file=${arg}
-  fi
-done
-
 tools_path=`cd $(dirname "$0");pwd`
 if [ ! "$D8_PATH" ]; then
   d8_public=`which d8`
@@ -18,20 +9,22 @@
 d8_exec=$D8_PATH/d8
 
 if [ ! -x $d8_exec ]; then
-  D8_PATH=`pwd`/out/native
-  d8_exec=$D8_PATH/d8
-fi
-
-if [ ! -x $d8_exec ]; then
-  d8_exec=`grep -m 1 -o '".*/d8"' $log_file | sed 's/"//g'`
-fi
-
-if [ ! -x $d8_exec ]; then
   echo "d8 shell not found in $D8_PATH"
-  echo "To build, execute 'make native' from the V8 directory"
+  echo "To build, execute 'scons <flags> d8' from the V8 directory"
   exit 1
 fi
 
+
+# find the name of the log file to process, it must not start with a dash.
+log_file="v8.log"
+for arg in "$@"
+do
+  if ! expr "X${arg}" : "^X-" > /dev/null; then
+    log_file=${arg}
+  fi
+done
+
+
 # nm spits out 'no symbols found' messages to stderr.
 cat $log_file | $d8_exec $tools_path/splaytree.js $tools_path/codemap.js \
   $tools_path/csvparser.js $tools_path/consarray.js \
diff --git a/tools/ll_prof.py b/tools/ll_prof.py
index 51ba672..58cbb95 100755
--- a/tools/ll_prof.py
+++ b/tools/ll_prof.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2012 the V8 project authors. All rights reserved.
+# Copyright 2010 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
@@ -334,7 +334,6 @@
   _ARCH_TO_POINTER_TYPE_MAP = {
     "ia32": ctypes.c_uint32,
     "arm": ctypes.c_uint32,
-    "mips": ctypes.c_uint32,
     "x64": ctypes.c_uint64
   }
 
@@ -400,16 +399,12 @@
         code = Code(name, start_address, end_address, origin, origin_offset)
         conficting_code = self.code_map.Find(start_address)
         if conficting_code:
-          if not (conficting_code.start_address == code.start_address and
-            conficting_code.end_address == code.end_address):
-            self.code_map.Remove(conficting_code)
-          else:
-            LogReader._HandleCodeConflict(conficting_code, code)
-            # TODO(vitalyr): this warning is too noisy because of our
-            # attempts to reconstruct code log from the snapshot.
-            # print >>sys.stderr, \
-            #     "Warning: Skipping duplicate code log entry %s" % code
-            continue
+          LogReader._HandleCodeConflict(conficting_code, code)
+          # TODO(vitalyr): this warning is too noisy because of our
+          # attempts to reconstruct code log from the snapshot.
+          # print >>sys.stderr, \
+          #     "Warning: Skipping duplicate code log entry %s" % code
+          continue
         self.code_map.Add(code)
         continue
 
@@ -673,9 +668,7 @@
 OBJDUMP_SYMBOL_LINE_RE = re.compile(
   r"^([a-f0-9]+)\s(.{7})\s(\S+)\s+([a-f0-9]+)\s+(?:\.hidden\s+)?(.*)$")
 OBJDUMP_DYNAMIC_SYMBOLS_START_RE = re.compile(
-  r"^DYNAMIC SYMBOL TABLE")
-OBJDUMP_SKIP_RE = re.compile(
-  r"^.*ld\.so\.cache$")
+   r"^DYNAMIC SYMBOL TABLE")
 KERNEL_ALLSYMS_FILE = "/proc/kallsyms"
 PERF_KERNEL_ALLSYMS_RE = re.compile(
   r".*kallsyms.*")
@@ -694,8 +687,6 @@
     # is 0.
     if mmap_info.tid == 0 and not options.kernel:
       return True
-    if OBJDUMP_SKIP_RE.match(mmap_info.filename):
-      return True
     if PERF_KERNEL_ALLSYMS_RE.match(mmap_info.filename):
       return self._LoadKernelSymbols(code_map)
     self.infos.append(mmap_info)
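The lines removed from ll_prof.py above distinguished a true duplicate code-log entry (same start and end address) from an overlapping entry with a different range, evicting the stale one instead of skipping the new one. A sketch of that policy, using hypothetical, minimal stand-ins for the real Code and CodeMap classes:

class Code(object):
  def __init__(self, name, start_address, end_address):
    self.name = name
    self.start_address = start_address
    self.end_address = end_address

class CodeMap(object):
  def __init__(self):
    self.entries = {}
  def Add(self, code):
    self.entries[code.start_address] = code
  def Remove(self, code):
    del self.entries[code.start_address]
  def Find(self, address):
    return self.entries.get(address)

def add_with_conflict_check(code_map, code):
  existing = code_map.Find(code.start_address)
  if existing is not None:
    same_range = (existing.start_address == code.start_address and
                  existing.end_address == code.end_address)
    if same_range:
      return existing            # true duplicate: keep the old entry
    code_map.Remove(existing)    # stale overlapping entry: evict and replace
  code_map.Add(code)
  return code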
diff --git a/tools/logreader.js b/tools/logreader.js
index a8141da..315e721 100644
--- a/tools/logreader.js
+++ b/tools/logreader.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -134,8 +134,9 @@
 LogReader.prototype.dispatchLogRow_ = function(fields) {
   // Obtain the dispatch.
   var command = fields[0];
-  if (!(command in this.dispatchTable_)) return;
-
+  if (!(command in this.dispatchTable_)) {
+    throw new Error('unknown command: ' + command);
+  }
   var dispatch = this.dispatchTable_[command];
 
   if (dispatch === null || this.skipDispatch(dispatch)) {
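The logreader.js change above reverts the dispatcher to throwing on an unknown log command rather than silently ignoring it. The two policies, sketched in Python with made-up handlers:

dispatch_table = {'code-creation': lambda fields: None,   # made-up handlers
                  'tick': lambda fields: None}

def dispatch_strict(fields):
  command = fields[0]
  if command not in dispatch_table:
    raise ValueError('unknown command: ' + command)        # reverted behaviour
  dispatch_table[command](fields[1:])

def dispatch_lenient(fields):
  command = fields[0]
  if command not in dispatch_table:
    return                                                 # newer behaviour: skip
  dispatch_table[command](fields[1:])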
diff --git a/tools/merge-to-branch.sh b/tools/merge-to-branch.sh
deleted file mode 100644
index 49bf3e4..0000000
--- a/tools/merge-to-branch.sh
+++ /dev/null
@@ -1,267 +0,0 @@
-#!/bin/bash
-# Copyright 2012 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-#       notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-#       copyright notice, this list of conditions and the following
-#       disclaimer in the documentation and/or other materials provided
-#       with the distribution.
-#     * Neither the name of Google Inc. nor the names of its
-#       contributors may be used to endorse or promote products derived
-#       from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-########## Global variable definitions
-
-BRANCHNAME=prepare-merge
-PERSISTFILE_BASENAME=/tmp/v8-merge-to-branch-tempfile
-ALREADY_MERGING_SENTINEL_FILE="$PERSISTFILE_BASENAME-already-merging"
-COMMIT_HASHES_FILE="$PERSISTFILE_BASENAME-PATCH_COMMIT_HASHES"
-TEMPORARY_PATCH_FILE="$PERSISTFILE_BASENAME-temporary-patch"
-
-########## Function definitions
-
-source $(dirname $BASH_SOURCE)/common-includes.sh
-
-usage() {
-cat << EOF
-usage: $0 [OPTIONS]... [BRANCH] [REVISION]...
-
-Performs the necessary steps to merge revisions from bleeding_edge
-to other branches, including trunk.
-
-OPTIONS:
-  -h    Show this message
-  -s    Specify the step where to start work. Default: 0.
-  -p    Specify a patch file to apply as part of the merge
-EOF
-}
-
-persist_patch_commit_hashes() {
-  echo "PATCH_COMMIT_HASHES=( ${PATCH_COMMIT_HASHES[@]} )" > $COMMIT_HASHES_FILE
-}
-
-restore_patch_commit_hashes() {
-  source $COMMIT_HASHES_FILE
-}
-
-restore_patch_commit_hashes_if_unset() {
-  [[ "${#PATCH_COMMIT_HASHES[@]}" == 0 ]] && restore_patch_commit_hashes
-  [[ "${#PATCH_COMMIT_HASHES[@]}" == 0 ]] && [[ -z "$EXTRA_PATCH" ]] && \
-      die "Variable PATCH_COMMIT_HASHES could not be restored."
-}
-
-########## Option parsing
-
-while getopts ":hs:fp:" OPTION ; do
-  case $OPTION in
-    h)  usage
-        exit 0
-        ;;
-    p)  EXTRA_PATCH=$OPTARG
-        ;;
-    f)  rm -f "$ALREADY_MERGING_SENTINEL_FILE"
-        ;;
-    s)  START_STEP=$OPTARG
-        ;;
-    ?)  echo "Illegal option: -$OPTARG"
-        usage
-        exit 1
-        ;;
-  esac
-done
-let OPTION_COUNT=$OPTIND-1
-shift $OPTION_COUNT
-
-########## Regular workflow
-
-# If there is a merge in progress, abort.
-[[ -e "$ALREADY_MERGING_SENTINEL_FILE" ]] && [[ $START_STEP -eq 0 ]] \
-   && die "A merge is already in progress"
-touch "$ALREADY_MERGING_SENTINEL_FILE"
-
-initial_environment_checks
-
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  if [ ${#@} -lt 2 ] && [ -z "$EXTRA_PATCH" ] ; then
-    die "Either a patch file or revision numbers must be specified"
-  fi
-  echo ">>> Step $CURRENT_STEP: Preparation"
-  MERGE_TO_BRANCH=$1
-  [[ -n "$MERGE_TO_BRANCH" ]] || die "Please specify a branch to merge to"
-  shift
-  persist "MERGE_TO_BRANCH"
-  common_prepare
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Create a fresh branch for the patch."
-  restore_if_unset "MERGE_TO_BRANCH"
-  git checkout -b $BRANCHNAME svn/$MERGE_TO_BRANCH \
-    || die "Creating branch $BRANCHNAME failed."
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Find the git \
-revisions associated with the patches."
-  current=0
-  for REVISION in "$@" ; do
-    NEXT_HASH=$(git svn find-rev "r$REVISION" svn/bleeding_edge)
-    [[ -n "$NEXT_HASH" ]] \
-      || die "Cannot determine git hash for r$REVISION"
-    PATCH_COMMIT_HASHES[$current]="$NEXT_HASH"
-    [[ -n "$REVISION_LIST" ]] && REVISION_LIST="$REVISION_LIST,"
-    REVISION_LIST="$REVISION_LIST r$REVISION"
-    let current+=1
-  done
-  if [ -z "$REVISION_LIST" ] ; then
-    NEW_COMMIT_MSG="Applied patch to $MERGE_TO_BRANCH branch."
-  else
-    NEW_COMMIT_MSG="Merged$REVISION_LIST into $MERGE_TO_BRANCH branch."
-  fi;
-
-  echo "$NEW_COMMIT_MSG" > $COMMITMSG_FILE
-  echo "" >> $COMMITMSG_FILE
-  for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
-    PATCH_MERGE_DESCRIPTION=$(git log -1 --format=%s $HASH)
-    echo "$PATCH_MERGE_DESCRIPTION" >> $COMMITMSG_FILE
-    echo "" >> $COMMITMSG_FILE
-  done
-  for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
-    BUG=$(git log -1 $HASH | grep "BUG=" | awk -F '=' '{print $NF}')
-    if [ -n "$BUG" ] ; then
-      [[ -n "$BUG_AGGREGATE" ]] && BUG_AGGREGATE="$BUG_AGGREGATE,"
-      BUG_AGGREGATE="$BUG_AGGREGATE$BUG"
-    fi
-  done
-  if [ -n "$BUG_AGGREGATE" ] ; then
-    echo "BUG=$BUG_AGGREGATE" >> $COMMITMSG_FILE
-  fi
-  persist "NEW_COMMIT_MSG"
-  persist "REVISION_LIST"
-  persist_patch_commit_hashes
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Apply patches for selected revisions."
-  restore_if_unset "MERGE_TO_BRANCH"
-  restore_patch_commit_hashes_if_unset "PATCH_COMMIT_HASHES"
-  rm -f "$TOUCHED_FILES_FILE"
-  for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
-    echo "Applying patch for $HASH to $MERGE_TO_BRANCH..."
-    git log -1 -p $HASH > "$TEMPORARY_PATCH_FILE"
-    apply_patch "$TEMPORARY_PATCH_FILE"
-  done
-  if [ -n "$EXTRA_PATCH" ] ; then
-    apply_patch "$EXTRA_PATCH"
-  fi
-  stage_files
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Prepare $VERSION_FILE."
-  # These version numbers are used again for creating the tag
-  read_and_persist_version
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Increment version number."
-  restore_if_unset "PATCH"
-  NEWPATCH=$(($PATCH + 1))
-  confirm "Automatically increment PATCH_LEVEL? (Saying 'n' will fire up \
-your EDITOR on $VERSION_FILE so you can make arbitrary changes. When \
-you're done, save the file and exit your EDITOR.)"
-  if [ $? -eq 0 ] ; then
-    sed -e "/#define PATCH_LEVEL/s/[0-9]*$/$NEWPATCH/" \
-        -i "$VERSION_FILE"
-  else
-    $EDITOR "$VERSION_FILE"
-  fi
-  read_and_persist_version "NEW"
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Commit to local branch."
-  git commit -a -F "$COMMITMSG_FILE" \
-    || die "'git commit -a' failed."
-fi
-
-upload_step
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Commit to the repository."
-  restore_if_unset "MERGE_TO_BRANCH"
-  git checkout $BRANCHNAME \
-    || die "cannot ensure that the current branch is $BRANCHNAME"
-  wait_for_lgtm
-  git cl dcommit || die "failed to commit to $MERGE_TO_BRANCH"
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Determine svn commit revision"
-  restore_if_unset "NEW_COMMIT_MSG"
-  restore_if_unset "MERGE_TO_BRANCH"
-  git svn fetch || die "'git svn fetch' failed."
-  COMMIT_HASH=$(git log -1 --format=%H --grep="$NEW_COMMIT_MSG" \
-    svn/$MERGE_TO_BRANCH)
-  [[ -z "$COMMIT_HASH" ]] && die "Unable to map git commit to svn revision"
-  SVN_REVISION=$(git svn find-rev $COMMIT_HASH)
-  echo "subversion revision number is r$SVN_REVISION"
-  persist "SVN_REVISION"
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Create the tag."
-  restore_if_unset "SVN_REVISION"
-  restore_version_if_unset "NEW"
-  echo "Creating tag svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
-  if [ "$MERGE_TO_BRANCH" == "trunk" ] ; then
-    TO_URL="$MERGE_TO_BRANCH"
-  else
-    TO_URL="branches/$MERGE_TO_BRANCH"
-  fi
-  svn copy -r $SVN_REVISION \
-    https://v8.googlecode.com/svn/$TO_URL \
-    https://v8.googlecode.com/svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH \
-    -m "Tagging version $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
-  persist "TO_URL"
-fi
-
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Cleanup."
-  restore_if_unset "SVN_REVISION"
-  restore_if_unset "TO_URL"
-  restore_if_unset "REVISION_LIST"
-  restore_version_if_unset "NEW"
-  common_cleanup
-  echo "*** SUMMARY ***"
-  echo "version: $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
-  echo "branch: $TO_URL"
-  echo "svn revision: $SVN_REVISION"
-  [[ -n "$REVISION_LIST" ]] && echo "patches:$REVISION_LIST"
-fi
diff --git a/tools/presubmit.py b/tools/presubmit.py
index a5f4c61..fda7ba9 100755
--- a/tools/presubmit.py
+++ b/tools/presubmit.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2012 the V8 project authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
@@ -42,7 +42,6 @@
 import re
 import sys
 import subprocess
-import multiprocessing
 from subprocess import PIPE
 
 # Disabled LINT rules and reason.
@@ -102,33 +101,6 @@
 """.split()
 
 
-LINT_OUTPUT_PATTERN = re.compile(r'^.+[:(]\d+[:)]|^Done processing')
-
-
-def CppLintWorker(command):
-  try:
-    process = subprocess.Popen(command, stderr=subprocess.PIPE)
-    process.wait()
-    out_lines = ""
-    error_count = -1
-    while True:
-      out_line = process.stderr.readline()
-      if out_line == '' and process.poll() != None:
-        break
-      m = LINT_OUTPUT_PATTERN.match(out_line)
-      if m:
-        out_lines += out_line
-        error_count += 1
-    sys.stderr.write(out_lines)
-    return error_count
-  except KeyboardInterrupt:
-    process.kill()
-  except:
-    print('Error running cpplint.py. Please make sure you have depot_tools' +
-          ' in your $PATH. Lint check skipped.')
-    process.kill()
-
-
 class FileContentsCache(object):
 
   def __init__(self, sums_file_name):
@@ -234,28 +206,24 @@
       return True
 
     filt = '-,' + ",".join(['+' + n for n in ENABLED_LINT_RULES])
-    command = ['cpplint.py', '--filter', filt]
+    command = ['cpplint.py', '--filter', filt] + join(files)
     local_cpplint = join(path, "tools", "cpplint.py")
     if exists(local_cpplint):
-      command = ['python', local_cpplint, '--filter', filt]
+      command = ['python', local_cpplint, '--filter', filt] + join(files)
 
-    commands = join([command + [file] for file in files])
-    count = multiprocessing.cpu_count()
-    pool = multiprocessing.Pool(count)
-    try:
-      results = pool.map_async(CppLintWorker, commands).get(999999)
-    except KeyboardInterrupt:
-      print "\nCaught KeyboardInterrupt, terminating workers."
-      sys.exit(1)
+    process = subprocess.Popen(command, stderr=subprocess.PIPE)
+    LINT_ERROR_PATTERN = re.compile(r'^(.+)[:(]\d+[:)]')
+    while True:
+      out_line = process.stderr.readline()
+      if out_line == '' and process.poll() != None:
+        break
+      sys.stderr.write(out_line)
+      m = LINT_ERROR_PATTERN.match(out_line)
+      if m:
+        good_files_cache.RemoveFile(m.group(1))
 
-    for i in range(len(files)):
-      if results[i] > 0:
-        good_files_cache.RemoveFile(files[i])
-
-    total_errors = sum(results)
-    print "Total errors found: %d" % total_errors
     good_files_cache.Save()
-    return total_errors == 0
+    return process.returncode == 0
 
 
 COPYRIGHT_HEADER_PATTERN = re.compile(
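The presubmit.py hunks above drop the parallel lint runner (CppLintWorker fanned out over a multiprocessing.Pool, one cpplint invocation per file) in favour of a single cpplint process whose stderr is scanned line by line. A simplified sketch of the removed parallel pattern, assuming cpplint.py is on PATH and using hypothetical file names; it is not the actual removed code:

import multiprocessing
import re
import subprocess

ISSUE_RE = re.compile(r'^.+[:(]\d+[:)]')   # "file.cc:123:" style issue lines

def lint_one(filename):
  # One cpplint subprocess per file; issues are reported on stderr.
  proc = subprocess.Popen(['cpplint.py', filename],
                          stderr=subprocess.PIPE, universal_newlines=True)
  _, err = proc.communicate()
  return sum(1 for line in err.splitlines() if ISSUE_RE.match(line))

if __name__ == '__main__':
  files = ['src/objects.cc', 'src/heap.cc']          # hypothetical paths
  pool = multiprocessing.Pool(multiprocessing.cpu_count())
  error_counts = pool.map(lint_one, files)
  clean = [f for f, n in zip(files, error_counts) if n == 0]
  print("clean: %s, total errors: %d" % (clean, sum(error_counts)))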
diff --git a/tools/push-to-trunk.sh b/tools/push-to-trunk.sh
index 3fb5b34..761b733 100755
--- a/tools/push-to-trunk.sh
+++ b/tools/push-to-trunk.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2012 the V8 project authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
@@ -31,13 +31,18 @@
 
 BRANCHNAME=prepare-push
 TRUNKBRANCH=trunk-push
+TEMP_BRANCH=v8-push-to-trunk-script-temporary-branch
+VERSION_FILE="src/version.cc"
 PERSISTFILE_BASENAME=/tmp/v8-push-to-trunk-tempfile
-CHROME_PATH=
+CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
+PATCH_FILE="$PERSISTFILE_BASENAME-patch"
+COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
+TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
+STEP=0
+
 
 ########## Function definitions
 
-source $(dirname $BASH_SOURCE)/common-includes.sh
-
 usage() {
 cat << EOF
 usage: $0 OPTIONS
@@ -49,24 +54,71 @@
   -h    Show this message
   -s    Specify the step where to start work. Default: 0.
   -l    Manually specify the git commit ID of the last push to trunk.
-  -c    Specify the path to your Chromium src/ directory to automate the
-        V8 roll.
 EOF
 }
 
+die() {
+  [[ -n "$1" ]] && echo "Error: $1"
+  echo "Exiting."
+  exit 1
+}
+
+confirm() {
+  echo -n "$1 [Y/n] "
+  read ANSWER
+  if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
+    return 0
+  else
+    return 1
+  fi
+}
+
+delete_branch() {
+  local MATCH=$(git branch | grep $1 | awk '{print $NF}' )
+  if [ "$MATCH" == "$1" ] ; then
+    confirm "Branch $1 exists, do you want to delete it?"
+    if [ $? -eq 0 ] ; then
+      git branch -D $1 || die "Deleting branch '$1' failed."
+      echo "Branch $1 deleted."
+    else
+      die "Can't continue. Please delete branch $1 and try again."
+    fi
+  fi
+}
+
+# Persist and restore variables to support canceling/resuming execution
+# of this script.
+persist() {
+  local VARNAME=$1
+  local FILE="$PERSISTFILE_BASENAME-$VARNAME"
+  echo "${!VARNAME}" > $FILE
+}
+
+restore() {
+  local VARNAME=$1
+  local FILE="$PERSISTFILE_BASENAME-$VARNAME"
+  local VALUE="$(cat $FILE)"
+  eval "$VARNAME=\"$VALUE\""
+}
+
+restore_if_unset() {
+  local VARNAME=$1
+  [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
+  [[ -z "${!VARNAME}" ]] && die "Variable '$VARNAME' could not be restored."
+}
+
+
 ########## Option parsing
 
-while getopts ":hs:l:c:" OPTION ; do
+while getopts ":hs:l:" OPTION ; do
   case $OPTION in
     h)  usage
         exit 0
         ;;
-    s)  START_STEP=$OPTARG
+    s)  STEP=$OPTARG
         ;;
     l)  LASTPUSH=$OPTARG
         ;;
-    c)  CHROME_PATH=$OPTARG
-        ;;
     ?)  echo "Illegal option: -$OPTARG"
         usage
         exit 1
@@ -77,24 +129,46 @@
 
 ########## Regular workflow
 
-initial_environment_checks
+# Cancel if this is not a git checkout.
+[[ -d .git ]] \
+  || die "This is not a git checkout, this script won't work for you."
 
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Preparation"
-  common_prepare
+# Cancel if EDITOR is unset or not executable.
+[[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
+  || die "Please set your EDITOR environment variable, you'll need it."
+
+if [ $STEP -le 0 ] ; then
+  echo ">>> Step 0: Preparation"
+  # Check for a clean workdir.
+  [[ -z "$(git status -s -uno)" ]] \
+    || die "Workspace is not clean. Please commit or undo your changes."
+
+  # Persist current branch.
+  CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
+  persist "CURRENT_BRANCH"
+  # Get ahold of a safe temporary branch and check it out.
+  if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then
+    delete_branch $TEMP_BRANCH
+    git checkout -b $TEMP_BRANCH
+  fi
+  # Delete branches if they exist.
+  delete_branch $BRANCHNAME
   delete_branch $TRUNKBRANCH
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Create a fresh branch."
+if [ $STEP -le 1 ] ; then
+  echo ">>> Step 1: Fetch unfetched revisions."
+  git svn fetch || die "'git svn fetch' failed."
+fi
+
+if [ $STEP -le 2 ] ; then
+  echo ">>> Step 2: Create a fresh branch."
   git checkout -b $BRANCHNAME svn/bleeding_edge \
     || die "Creating branch $BRANCHNAME failed."
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Detect commit ID of last push to trunk."
+if [ $STEP -le 3 ] ; then
+  echo ">>> Step 3: Detect commit ID of last push to trunk."
   [[ -n "$LASTPUSH" ]] || LASTPUSH=$(git log -1 --format=%H ChangeLog)
   LOOP=1
   while [ $LOOP -eq 1 ] ; do
@@ -110,11 +184,15 @@
   persist "LASTPUSH"
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Prepare raw ChangeLog entry."
-  # These version numbers are used again later for the trunk commit.
-  read_and_persist_version
+if [ $STEP -le 4 ] ; then
+  echo ">>> Step 4: Prepare raw ChangeLog entry."
+# These version numbers are used again later for the trunk commit.
+  MAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
+  persist "MAJOR"
+  MINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
+  persist "MINOR"
+  BUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
+  persist "BUILD"
 
   DATE=$(date +%Y-%m-%d)
   persist "DATE"
@@ -124,25 +202,18 @@
   for commit in $COMMITS ; do
     # Get the commit's title line.
     git log -1 $commit --format="%w(80,8,8)%s" >> "$CHANGELOG_ENTRY_FILE"
-    # Grep for "BUG=xxxx" lines in the commit message and convert them to
-    # "(issue xxxx)".
-    git log -1 $commit --format="%B" \
-        | grep "^BUG=" | grep -v "BUG=$" | grep -v "BUG=none$" \
-        | sed -e 's/^/        /' \
-        | sed -e 's/BUG=v8:\(.*\)$/(issue \1)/' \
-        | sed -e 's/BUG=\(.*\)$/(Chromium issue \1)/' \
-        >> "$CHANGELOG_ENTRY_FILE"
+    # Grep for "BUG=xxxx" lines in the commit message.
+    git log -1 $commit --format="%b" | grep BUG= | grep -v "BUG=$" \
+                                     | sed -e 's/^/        /' \
+                                     >> "$CHANGELOG_ENTRY_FILE"
     # Append the commit's author for reference.
     git log -1 $commit --format="%w(80,8,8)(%an)" >> "$CHANGELOG_ENTRY_FILE"
     echo "" >> "$CHANGELOG_ENTRY_FILE"
   done
-  echo "        Performance and stability improvements on all platforms." \
-    >> "$CHANGELOG_ENTRY_FILE"
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Edit ChangeLog entry."
+if [ $STEP -le 5 ] ; then
+  echo ">>> Step 5: Edit ChangeLog entry."
   echo -n "Please press <Return> to have your EDITOR open the ChangeLog entry, \
 then edit its contents to your liking. When you're done, save the file and \
 exit your EDITOR. "
@@ -150,13 +221,7 @@
   $EDITOR "$CHANGELOG_ENTRY_FILE"
   NEWCHANGELOG=$(mktemp)
   # Eliminate any trailing newlines by going through a shell variable.
-  # Also (1) eliminate tabs, (2) fix too little and (3) too much indentation,
-  # and (4) eliminate trailing whitespace.
-  CHANGELOGENTRY=$(cat "$CHANGELOG_ENTRY_FILE" \
-                   | sed -e 's/\t/        /g' \
-                   | sed -e 's/^ \{1,7\}\([^ ]\)/        \1/g' \
-                   | sed -e 's/^ \{9,80\}\([^ ]\)/        \1/g' \
-                   | sed -e 's/ \+$//')
+  CHANGELOGENTRY=$(cat "$CHANGELOG_ENTRY_FILE")
   [[ -n "$CHANGELOGENTRY" ]] || die "Empty ChangeLog entry."
   echo "$CHANGELOGENTRY" > "$NEWCHANGELOG"
   echo "" >> "$NEWCHANGELOG" # Explicitly insert two empty lines.
@@ -165,9 +230,8 @@
   mv "$NEWCHANGELOG" ChangeLog
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Increment version number."
+if [ $STEP -le 6 ] ; then
+  echo ">>> Step 6: Increment version number."
   restore_if_unset "BUILD"
   NEWBUILD=$(($BUILD + 1))
   confirm "Automatically increment BUILD_NUMBER? (Saying 'n' will fire up \
@@ -179,26 +243,42 @@
   else
     $EDITOR "$VERSION_FILE"
   fi
-  read_and_persist_version "NEW"
+  NEWMAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
+  persist "NEWMAJOR"
+  NEWMINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
+  persist "NEWMINOR"
+  NEWBUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
+  persist "NEWBUILD"
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Commit to local branch."
-  restore_version_if_unset "NEW"
-  PREPARE_COMMIT_MSG="Prepare push to trunk.  \
-Now working on version $NEWMAJOR.$NEWMINOR.$NEWBUILD."
-  persist "PREPARE_COMMIT_MSG"
-  git commit -a -m "$PREPARE_COMMIT_MSG" \
+if [ $STEP -le 7 ] ; then
+  echo ">>> Step 7: Commit to local branch."
+  restore_if_unset "NEWMAJOR"
+  restore_if_unset "NEWMINOR"
+  restore_if_unset "NEWBUILD"
+  git commit -a -m "Prepare push to trunk.  \
+Now working on version $NEWMAJOR.$NEWMINOR.$NEWBUILD." \
     || die "'git commit -a' failed."
 fi
 
-upload_step
+if [ $STEP -le 8 ] ; then
+  echo ">>> Step 8: Upload for code review."
+  echo -n "Please enter the email address of a V8 reviewer for your patch: "
+  read REVIEWER
+  git cl upload -r $REVIEWER --send-mail \
+    || die "'git cl upload' failed, please try again."
+fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Commit to the repository."
-  wait_for_lgtm
+if [ $STEP -le 9 ] ; then
+  echo ">>> Step 9: Commit to the repository."
+  echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
+change. (If you need to iterate on the patch, do so in another shell.)"
+  unset ANSWER
+  while [ "$ANSWER" != "LGTM" ] ; do
+    [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
+    echo -n "> "
+    read ANSWER
+  done
   # Re-read the ChangeLog entry (to pick up possible changes).
   cat ChangeLog | awk --posix '{
     if ($0 ~ /^[0-9]{4}-[0-9]{2}-[0-9]{2}:/) {
@@ -213,24 +293,16 @@
   git cl dcommit || die "'git cl dcommit' failed, please try again."
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Fetch straggler commits that sneaked in \
-since this script was started."
-  git svn fetch || die "'git svn fetch' failed."
-  git checkout svn/bleeding_edge
-  restore_if_unset "PREPARE_COMMIT_MSG"
-  PREPARE_COMMIT_HASH=$(git log -1 --format=%H --grep="$PREPARE_COMMIT_MSG")
-  persist "PREPARE_COMMIT_HASH"
+if [ $STEP -le 10 ] ; then
+  echo ">>> Step 10: NOP"
+  # Present in the manual guide, not necessary (even harmful!) for this script.
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Squash commits into one."
+if [ $STEP -le 11 ] ; then
+  echo ">>> Step 11: Squash commits into one."
   # Instead of relying on "git rebase -i", we'll just create a diff, because
   # that's easier to automate.
-  restore_if_unset "PREPARE_COMMIT_HASH"
-  git diff svn/trunk $PREPARE_COMMIT_HASH > "$PATCH_FILE"
+  git diff svn/trunk > "$PATCH_FILE"
   # Convert the ChangeLog entry to commit message format:
   # - remove date
   # - remove indentation
@@ -252,29 +324,54 @@
           need_space = 1;
         }
       }' > "$COMMITMSG_FILE" || die "Commit message editing failed."
+  LOOP=1
+  while [ $LOOP -eq 1 ] ; do
+    echo "This is the trunk commit message:"
+    echo "--------------------"
+    cat "$COMMITMSG_FILE"
+    echo -e "\n--------------------"
+    confirm "Does this look good to you? (Saying 'n' will fire up your \
+EDITOR so you can change the commit message. When you're done, save the \
+file and exit your EDITOR.)"
+    if [ $? -eq 0 ] ; then
+      LOOP=0
+    else
+      $EDITOR "$COMMITMSG_FILE"
+    fi
+  done
   rm -f "$CHANGELOG_ENTRY_FILE"
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Create a new branch from trunk."
+if [ $STEP -le 12 ] ; then
+  echo ">>> Step 12: Create a new branch from trunk."
   git checkout -b $TRUNKBRANCH svn/trunk \
     || die "Checking out a new branch '$TRUNKBRANCH' failed."
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Apply squashed changes."
-  rm -f "$TOUCHED_FILES_FILE"
-  apply_patch "$PATCH_FILE"
-  stage_files
+if [ $STEP -le 13 ] ; then
+  echo ">>> Step 13: Apply squashed changes."
+  patch -p1 < "$PATCH_FILE" | tee >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
+  [[ $? -eq 0 ]] || die "Applying the patch to trunk failed."
+  # Stage added and modified files.
+  TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
+  for FILE in $TOUCHED_FILES ; do
+    git add "$FILE"
+  done
+  # Stage deleted files.
+  DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
+                                                 | awk '{print $NF}')
+  for FILE in $DELETED_FILES ; do
+    git rm "$FILE"
+  done
   rm -f "$PATCH_FILE"
+  rm -f "$TOUCHED_FILES_FILE"
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Set correct version for trunk."
-  restore_version_if_unset
+if [ $STEP -le 14 ] ; then
+  echo ">>> Step 14: Set correct version for trunk."
+  restore_if_unset "MAJOR"
+  restore_if_unset "MINOR"
+  restore_if_unset "BUILD"
   sed -e "/#define MAJOR_VERSION/s/[0-9]*$/$MAJOR/" \
       -e "/#define MINOR_VERSION/s/[0-9]*$/$MINOR/" \
       -e "/#define BUILD_NUMBER/s/[0-9]*$/$BUILD/" \
@@ -283,110 +380,52 @@
       -i "$VERSION_FILE" || die "Patching $VERSION_FILE failed."
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Commit to local trunk branch."
+if [ $STEP -le 15 ] ; then
+  echo ">>> Step 15: Commit to local trunk branch."
   git add "$VERSION_FILE"
   git commit -F "$COMMITMSG_FILE" || die "'git commit' failed."
   rm -f "$COMMITMSG_FILE"
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Sanity check."
+if [ $STEP -le 16 ] ; then
+  echo ">>> Step 16: Sanity check."
   confirm "Please check if your local checkout is sane: Inspect $VERSION_FILE, \
 compile, run tests. Do you want to commit this new trunk revision to the \
 repository?"
   [[ $? -eq 0 ]] || die "Execution canceled."
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Commit to SVN."
-  git svn dcommit | tee >(grep -E "^Committed r[0-9]+" \
-                          | sed -e 's/^Committed r\([0-9]\+\)/\1/' \
-                          > "$TRUNK_REVISION_FILE") \
-    || die "'git svn dcommit' failed."
-  TRUNK_REVISION=$(cat "$TRUNK_REVISION_FILE")
-  persist "TRUNK_REVISION"
-  rm -f "$TRUNK_REVISION_FILE"
+if [ $STEP -le 17 ] ; then
+  echo ">>> Step 17. Commit to SVN."
+  git svn dcommit || die "'git svn dcommit' failed."
 fi
 
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Tag the new revision."
-  restore_version_if_unset
+if [ $STEP -le 18 ] ; then
+  echo ">>> Step 18: Tag the new revision."
+  restore_if_unset "MAJOR"
+  restore_if_unset "MINOR"
+  restore_if_unset "BUILD"
   git svn tag $MAJOR.$MINOR.$BUILD -m "Tagging version $MAJOR.$MINOR.$BUILD" \
     || die "'git svn tag' failed."
 fi
 
-if [ -n "$CHROME_PATH" ] ; then
-
-  let CURRENT_STEP+=1
-  if [ $START_STEP -le $CURRENT_STEP ] ; then
-    echo ">>> Step $CURRENT_STEP: Switch to Chromium checkout."
-    V8_PATH=$(pwd)
-    persist "V8_PATH"
-    cd "$CHROME_PATH"
-    initial_environment_checks
-    # Check for a clean workdir.
-    [[ -z "$(git status -s -uno)" ]] \
-      || die "Workspace is not clean. Please commit or undo your changes."
-    # Assert that the DEPS file is there.
-    [[ -w "DEPS" ]] || die "DEPS file not present or not writable; \
-current directory is: $(pwd)."
-  fi
-
-  let CURRENT_STEP+=1
-  if [ $START_STEP -le $CURRENT_STEP ] ; then
-    echo ">>> Step $CURRENT_STEP: Update the checkout and create a new branch."
-    git checkout master || die "'git checkout master' failed."
-    git pull || die "'git pull' failed, please try again."
-    restore_if_unset "TRUNK_REVISION"
-    git checkout -b "v8-roll-$TRUNK_REVISION" \
-      || die "Failed to checkout a new branch."
-  fi
-
-  let CURRENT_STEP+=1
-  if [ $START_STEP -le $CURRENT_STEP ] ; then
-    echo ">>> Step $CURRENT_STEP: Create and upload CL."
-    # Patch DEPS file.
-    sed -r -e "/\"v8_revision\": /s/\"[0-9]+\"/\"$TRUNK_REVISION\"/" \
-        -i DEPS
-    restore_version_if_unset
-    echo -n "Please enter the email address of a reviewer for the roll CL: "
-    read REVIEWER
-    git commit -am "Update V8 to version $MAJOR.$MINOR.$BUILD.
-
-TBR=$REVIEWER" || die "'git commit' failed."
-    git cl upload --send-mail \
-      || die "'git cl upload' failed, please try again."
-    echo "CL uploaded."
-  fi
-
-  let CURRENT_STEP+=1
-  if [ $START_STEP -le $CURRENT_STEP ] ; then
-    echo ">>> Step $CURRENT_STEP: Returning to V8 checkout."
-    restore_if_unset "V8_PATH"
-    cd "$V8_PATH"
-  fi
-fi  # if [ -n "$CHROME_PATH" ]
-
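
The removed Chromium-roll step patched the pinned V8 revision in Chromium's DEPS file with a single sed substitution. A minimal Python sketch of the same edit, assuming DEPS carries a line like '"v8_revision": "12345",':

    import re

    def update_deps_revision(deps_path, trunk_revision):
        # Swap the quoted number after "v8_revision": for the newly
        # committed trunk revision, leaving the rest of DEPS untouched.
        text = open(deps_path).read()
        text = re.sub(r'("v8_revision": )"\d+"',
                      r'\g<1>"%s"' % trunk_revision, text)
        open(deps_path, "w").write(text)
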
-let CURRENT_STEP+=1
-if [ $START_STEP -le $CURRENT_STEP ] ; then
-  echo ">>> Step $CURRENT_STEP: Done!"
-  restore_version_if_unset
-  restore_if_unset "TRUNK_REVISION"
-  if [ -n "$CHROME_PATH" ] ; then
-    echo "Congratulations, you have successfully created the trunk revision \
-$MAJOR.$MINOR.$BUILD and rolled it into Chromium. Please don't forget to \
-update the v8rel spreadsheet:"
-  else
-    echo "Congratulations, you have successfully created the trunk revision \
-$MAJOR.$MINOR.$BUILD. Please don't forget to roll this new version into \
-Chromium, and to update the v8rel spreadsheet:"
-  fi
-  echo -e "$MAJOR.$MINOR.$BUILD\ttrunk\t$TRUNK_REVISION"
-  common_cleanup
+if [ $STEP -le 19 ] ; then
+  echo ">>> Step 19: Cleanup."
+  restore_if_unset "CURRENT_BRANCH"
+  git checkout -f $CURRENT_BRANCH
+  [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH
+  [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
   [[ "$TRUNKBRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TRUNKBRANCH
 fi
+
+if [ $STEP -le 20 ] ; then
+  echo ">>> Step 20: Done!"
+  restore_if_unset "MAJOR"
+  restore_if_unset "MINOR"
+  restore_if_unset "BUILD"
+  echo "Congratulations, you have successfully created the trunk revision \
+$MAJOR.$MINOR.$BUILD. Please don't forget to update the v8rel spreadsheet, \
+and to roll this new version into Chromium."
+  # Clean up all temporary files.
+  rm -f "$PERSISTFILE_BASENAME"*
+fi
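
Both the old and new script rely on persist/restore_if_unset so that a run restarted at a later step can recover results computed earlier. The sketch below only illustrates the idea of stashing one value per file under a common prefix; the actual helpers and file naming are defined elsewhere in the script, so treat every name here as an assumption:

    import os

    PERSIST_PREFIX = "/tmp/v8-push-to-trunk-"  # illustrative prefix only

    def persist(name, value):
        # One small file per variable, so later steps (or re-runs) can
        # read back what earlier steps computed.
        open(PERSIST_PREFIX + name, "w").write(value)

    def restore_if_unset(values, name):
        # Only fall back to the stashed copy if the caller has not
        # already set the variable in this run.
        if not values.get(name):
            values[name] = open(PERSIST_PREFIX + name).read().strip()
        return values[name]
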
diff --git a/tools/test-wrapper-gypbuild.py b/tools/test-wrapper-gypbuild.py
index fda4105..ad5449a 100755
--- a/tools/test-wrapper-gypbuild.py
+++ b/tools/test-wrapper-gypbuild.py
@@ -73,8 +73,6 @@
       choices=PROGRESS_INDICATORS, default="mono")
   result.add_option("--report", help="Print a summary of the tests to be run",
       default=False, action="store_true")
-  result.add_option("--download-data", help="Download missing test suite data",
-      default=False, action="store_true")
   result.add_option("-s", "--suite", help="A test suite",
       default=[], action="append")
   result.add_option("-t", "--timeout", help="Timeout in seconds",
@@ -133,22 +131,18 @@
 
 
 def ProcessOptions(options):
-  if options.arch_and_mode == ".":
-    options.arch = []
-    options.mode = []
-  else:
-    if options.arch_and_mode != None and options.arch_and_mode != "":
-      tokens = options.arch_and_mode.split(".")
-      options.arch = tokens[0]
-      options.mode = tokens[1]
-    options.mode = options.mode.split(',')
-    options.arch = options.arch.split(',')
+  if options.arch_and_mode != None and options.arch_and_mode != "":
+    tokens = options.arch_and_mode.split(".")
+    options.arch = tokens[0]
+    options.mode = tokens[1]
+  options.mode = options.mode.split(',')
   for mode in options.mode:
     if not mode in ['debug', 'release']:
       print "Unknown mode %s" % mode
       return False
+  options.arch = options.arch.split(',')
   for arch in options.arch:
-    if not arch in ['ia32', 'x64', 'arm', 'mips']:
+    if not arch in ['ia32', 'x64', 'arm']:
       print "Unknown architecture %s" % arch
       return False
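
The restored ProcessOptions still ends up with comma-separated lists for both arch and mode: a dotted --arch-and-mode value such as ia32.release overrides the separate flags, and both strings are then split on commas. A compact sketch of that parsing; the helper name is illustrative:

    def split_arch_and_mode(arch_and_mode, arch, mode):
        # "ia32.release" overrides the separate flags; both strings are
        # then split on commas so callers always see lists.
        if arch_and_mode:
            arch, mode = arch_and_mode.split(".")
        return arch.split(","), mode.split(",")

    # split_arch_and_mode("ia32.release", "ia32,x64", "debug,release")
    # -> (["ia32"], ["release"])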
 
@@ -163,8 +157,6 @@
     result += ['--progress=' + options.progress]
   if options.report:
     result += ['--report']
-  if options.download_data:
-    result += ['--download-data']
   if options.suite != []:
     for suite in options.suite:
       result += ['--suite=../../test/' + suite]
@@ -173,7 +165,7 @@
   if options.snapshot:
     result += ['--snapshot']
   if options.special_command:
-    result += ['--special-command="%s"' % options.special_command]
+    result += ['--special-command=' + options.special_command]
   if options.valgrind:
     result += ['--valgrind']
   if options.cat:
@@ -197,9 +189,9 @@
   if options.crankshaft:
     result += ['--crankshaft']
   if options.shard_count != 1:
-    result += ['--shard-count=%s' % options.shard_count]
+    result += ['--shard_count=%s' % options.shard_count]
   if options.shard_run != 1:
-    result += ['--shard-run=%s' % options.shard_run]
+    result += ['--shard_run=%s' % options.shard_run]
   if options.noprof:
     result += ['--noprof']
   return result
@@ -240,18 +232,6 @@
                                env=env)
       returncodes += child.wait()
 
-  if len(options.mode) == 0 and len(options.arch) == 0:
-    print ">>> running tests"
-    shellpath = workspace + '/' + options.outdir
-    env['LD_LIBRARY_PATH'] = shellpath + '/lib.target'
-    shell = shellpath + '/d8'
-    child = subprocess.Popen(' '.join(args_for_children +
-                                      ['--shell=' + shell]),
-                             shell=True,
-                             cwd=workspace,
-                             env=env)
-    returncodes = child.wait()
-
   return returncodes
 
 
diff --git a/tools/test.py b/tools/test.py
index 0aacd99..ecc0062 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 #
-# Copyright 2012 the V8 project authors. All rights reserved.
+# Copyright 2008 the V8 project authors. All rights reserved.
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
@@ -472,7 +472,7 @@
   popen_args = args
   prev_error_mode = SEM_INVALID_VALUE
   if utils.IsWindows():
-    popen_args = subprocess.list2cmdline(args)
+    popen_args = '"' + subprocess.list2cmdline(args) + '"'
     if context.suppress_dialogs:
       # Try to change the error mode to avoid dialogs on fatal errors. Don't
       # touch any existing error mode flags by merging the existing error mode.
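
subprocess.list2cmdline flattens the argument list into one Windows command line, quoting only the arguments that need it; the reverted code additionally wraps the whole string in an extra pair of quotes. For example:

    import subprocess

    args = ["d8", "--expose-gc", "test dir/file.js"]
    # Prints: d8 --expose-gc "test dir/file.js"
    print subprocess.list2cmdline(args)
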
@@ -631,15 +631,9 @@
   def GetBuildRequirements(self, path, context):
     return self.GetConfiguration(context).GetBuildRequirements()
 
-  def DownloadData(self, context):
-    config = self.GetConfiguration(context)
-    if 'DownloadData' in dir(config):
-      config.DownloadData()
-
   def AddTestsToList(self, result, current_path, path, context, mode):
-    config = self.GetConfiguration(context)
-    for v in config.VariantFlags():
-      tests = config.ListTests(current_path, path, mode, v)
+    for v in self.GetConfiguration(context).VariantFlags():
+      tests = self.GetConfiguration(context).ListTests(current_path, path, mode, v)
       for t in tests: t.variant_flags = v
       result += tests
 
@@ -661,12 +655,6 @@
         result += test.GetBuildRequirements(rest, context)
     return result
 
-  def DownloadData(self, path, context):
-    (name, rest) = CarCdr(path)
-    for test in self.tests:
-      if not name or name.match(test.GetName()):
-        test.DownloadData(context)
-
   def ListTests(self, current_path, path, context, mode, variant_flags):
     (name, rest) = CarCdr(path)
     result = [ ]
@@ -686,8 +674,8 @@
     'debug'   : '_g',
     'release' : '' }
 FLAGS = {
-    'debug'   : ['--nobreak-on-abort', '--enable-slow-asserts', '--debug-code', '--verify-heap'],
-    'release' : ['--nobreak-on-abort']}
+    'debug'   : ['--enable-slow-asserts', '--debug-code', '--verify-heap'],
+    'release' : []}
 TIMEOUT_SCALEFACTOR = {
     'debug'   : 4,
     'release' : 1 }
@@ -723,7 +711,7 @@
   def GetTimeout(self, testcase, mode):
     result = self.timeout * TIMEOUT_SCALEFACTOR[mode]
     if '--stress-opt' in self.GetVmFlags(testcase, mode):
-      return result * 4
+      return result * 2
     else:
       return result
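
Both versions first scale the base timeout by the per-mode factor and then again for --stress-opt runs; only the stress multiplier changes (4x before this revert, 2x after). With an illustrative 60 second base timeout in debug mode, that is 60 * 4 = 240 seconds normally and 480 seconds under --stress-opt for the reverted code. A tiny sketch of the reverted behaviour:

    TIMEOUT_SCALEFACTOR = {'debug': 4, 'release': 1}

    def timeout_for(base, mode, stress_opt):
        # Mode factor first, then the 2x stress multiplier of the
        # reverted code.
        result = base * TIMEOUT_SCALEFACTOR[mode]
        return result * 2 if stress_opt else result

    # timeout_for(60, 'debug', True) == 480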
 
@@ -862,9 +850,6 @@
     elif self.op == '==':
       inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
       return not inter.IsEmpty()
-    elif self.op == '!=':
-      inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
-      return inter.IsEmpty()
     else:
       assert self.op == '&&'
       return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs)
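
The dropped '!=' branch mirrors '==': both intersect the outcome sets of their operands, with '==' true when the intersection is non-empty and '!=' true when it is empty. A small sketch of that semantics using plain Python sets as a stand-in for the script's outcome objects (an approximation, not the real classes):

    def outcomes_equal(left, right):
        # '==' in the status-file expressions: any shared outcome counts.
        return bool(left & right)

    def outcomes_not_equal(left, right):
        # The removed '!=' operator: true only when nothing is shared.
        return not (left & right)

    # outcomes_equal({'PASS', 'FAIL'}, {'FAIL'})  -> True
    # outcomes_not_equal({'PASS'}, {'FAIL'})      -> True
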
@@ -947,9 +932,6 @@
       elif self.Current(2) == '==':
         self.AddToken('==')
         self.Advance(2)
-      elif self.Current(2) == '!=':
-        self.AddToken('!=')
-        self.Advance(2)
       else:
         return None
     return self.tokens
@@ -1002,7 +984,7 @@
     return None
 
 
-BINARIES = ['==', '!=']
+BINARIES = ['==']
 def ParseOperatorExpression(scan):
   left = ParseAtomicExpression(scan)
   if not left: return None
@@ -1024,7 +1006,7 @@
     right = ParseOperatorExpression(scan)
     if not right:
       return None
-    left = Operation(left, 'if', right)
+    left=  Operation(left, 'if', right)
   return left
 
 
@@ -1204,8 +1186,6 @@
       default='scons')
   result.add_option("--report", help="Print a summary of the tests to be run",
       default=False, action="store_true")
-  result.add_option("--download-data", help="Download missing test suite data",
-      default=False, action="store_true")
   result.add_option("-s", "--suite", help="A test suite",
       default=[], action="append")
   result.add_option("-t", "--timeout", help="Timeout in seconds",
@@ -1231,7 +1211,6 @@
         dest="suppress_dialogs", default=True, action="store_true")
   result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
         dest="suppress_dialogs", action="store_false")
-  result.add_option("--mips-arch-variant", help="mips architecture variant: mips32r1/mips32r2", default="mips32r2");
   result.add_option("--shell", help="Path to V8 shell", default="d8")
   result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true")
   result.add_option("--store-unexpected-output",
@@ -1293,9 +1272,6 @@
   if options.snapshot:
     options.scons_flags.append("snapshot=on")
   global VARIANT_FLAGS
-  if options.mips_arch_variant:
-    options.scons_flags.append("mips_arch_variant=" + options.mips_arch_variant)
-
   if options.stress_only:
     VARIANT_FLAGS = [['--stress-opt', '--always-opt']]
   if options.nostress:
@@ -1476,11 +1452,6 @@
   root.GetTestStatus(context, sections, defs)
   config = Configuration(sections, defs)
 
-  # Download missing test suite data if requested.
-  if options.download_data:
-    for path in paths:
-      root.DownloadData(path, context)
-
   # List the tests
   all_cases = [ ]
   all_unused = [ ]
diff --git a/tools/tickprocessor-driver.js b/tools/tickprocessor-driver.js
index 9af5ab6..4201e43 100644
--- a/tools/tickprocessor-driver.js
+++ b/tools/tickprocessor-driver.js
@@ -52,7 +52,6 @@
 var tickProcessor = new TickProcessor(
   new (entriesProviders[params.platform])(params.nm),
   params.separateIc,
-  params.callGraphSize,
   params.ignoreUnknown,
   params.stateFilter,
   snapshotLogProcessor);
diff --git a/tools/tickprocessor.js b/tools/tickprocessor.js
index 05a3369..5f57835 100644
--- a/tools/tickprocessor.js
+++ b/tools/tickprocessor.js
@@ -146,12 +146,7 @@
 
 
 function TickProcessor(
-    cppEntriesProvider,
-    separateIc,
-    callGraphSize,
-    ignoreUnknown,
-    stateFilter,
-    snapshotLogProcessor) {
+    cppEntriesProvider, separateIc, ignoreUnknown, stateFilter, snapshotLogProcessor) {
   LogReader.call(this, {
       'shared-library': { parsers: [null, parseInt, parseInt],
           processor: this.processSharedLibrary },
@@ -186,7 +181,6 @@
       'end-code-region': null });
 
   this.cppEntriesProvider_ = cppEntriesProvider;
-  this.callGraphSize_ = callGraphSize;
   this.ignoreUnknown_ = ignoreUnknown;
   this.stateFilter_ = stateFilter;
   this.snapshotLogProcessor_ = snapshotLogProcessor;
@@ -246,7 +240,6 @@
 
 TickProcessor.CALL_PROFILE_CUTOFF_PCT = 2.0;
 
-TickProcessor.CALL_GRAPH_SIZE = 5;
 
 /**
  * @override
@@ -542,7 +535,7 @@
           padLeft(rec.parentTotalPercent.toFixed(1), 5) + '%  ' +
           indentStr + rec.internalFuncName);
     // Limit backtrace depth.
-    if (indent < 2 * self.callGraphSize_) {
+    if (indent < 10) {
       self.printHeavyProfile(rec.children, indent + 2);
     }
     // Delimit top-level functions.
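
The configurable call-graph size dropped in this file defaults to 5 (the CALL_GRAPH_SIZE constant removed above), and the heavy profile indents two spaces per level, so "indent < 2 * callGraphSize" and the reverted hard-coded "indent < 10" cut the backtrace off at the same five levels. A small Python sketch of that depth-limited recursion, using a hypothetical node with 'name' and 'children' fields:

    CALL_GRAPH_SIZE = 5

    def print_heavy_profile(nodes, indent=0):
        # Two spaces of indent per level; stop recursing once the indent
        # implies CALL_GRAPH_SIZE levels, as the tick processor does.
        for node in nodes:
            print ' ' * indent + node['name']
            if indent < 2 * CALL_GRAPH_SIZE:
                print_heavy_profile(node['children'], indent + 2)
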
@@ -771,8 +764,6 @@
         'Show only ticks from OTHER VM state'],
     '-e': ['stateFilter', TickProcessor.VmStates.EXTERNAL,
         'Show only ticks from EXTERNAL VM state'],
-    '--call-graph-size': ['callGraphSize', TickProcessor.CALL_GRAPH_SIZE,
-        'Set the call graph size'],
     '--ignore-unknown': ['ignoreUnknown', true,
         'Exclude ticks of unknown code entries from processing'],
     '--separate-ic': ['separateIc', true,
@@ -801,7 +792,6 @@
   snapshotLogFileName: null,
   platform: 'unix',
   stateFilter: null,
-  callGraphSize: 5,
   ignoreUnknown: false,
   separateIc: false,
   nm: 'nm'
diff --git a/tools/utils.py b/tools/utils.py
index 232314c..fb94d14 100644
--- a/tools/utils.py
+++ b/tools/utils.py
@@ -61,8 +61,6 @@
     return 'openbsd'
   elif id == 'SunOS':
     return 'solaris'
-  elif id == 'NetBSD':
-    return 'netbsd'
   else:
     return None