Version 1.2.13.

Fixed issues 397, 398, and 399.

Added support for breakpoint groups.

Fixed bugs introduced with the new global object representation.

Fixed a few bugs in the ARM code generator.


git-svn-id: http://v8.googlecode.com/svn/trunk@2406 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index a3176c2..3f7ccf5 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -2421,16 +2421,22 @@
   Comment cmnt(masm_, "[ Conditional");
   JumpTarget then;
   JumpTarget else_;
-  JumpTarget exit;
   LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF,
                         &then, &else_, true);
-  Branch(false, &else_);
-  then.Bind();
-  LoadAndSpill(node->then_expression(), typeof_state());
-  exit.Jump();
-  else_.Bind();
-  LoadAndSpill(node->else_expression(), typeof_state());
-  exit.Bind();
+  if (has_valid_frame()) {
+    Branch(false, &else_);
+  }
+  if (has_valid_frame() || then.is_linked()) {
+    then.Bind();
+    LoadAndSpill(node->then_expression(), typeof_state());
+  }
+  if (else_.is_linked()) {
+    JumpTarget exit;
+    if (has_valid_frame()) exit.Jump();
+    else_.Bind();
+    LoadAndSpill(node->else_expression(), typeof_state());
+    if (exit.is_linked()) exit.Bind();
+  }
   ASSERT(frame_->height() == original_height + 1);
 }
 
@@ -3453,8 +3459,22 @@
 void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
   VirtualFrame::SpilledScope spilled_scope;
   ASSERT(args->length() == 0);
-  frame_->CallRuntime(Runtime::kIsConstructCall, 0);
-  frame_->EmitPush(r0);
+
+  // Get the frame pointer for the calling frame.
+  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+
+  // Skip the arguments adaptor frame if it exists.
+  Label check_frame_marker;
+  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
+  __ cmp(r1, Operand(ArgumentsAdaptorFrame::SENTINEL));
+  __ b(ne, &check_frame_marker);
+  __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
+
+  // Check the marker in the calling frame.
+  __ bind(&check_frame_marker);
+  __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
+  __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
+  cc_reg_ = eq;
 }
 
 
@@ -3591,7 +3611,9 @@
                           false_target(),
                           true_target(),
                           true);
-    cc_reg_ = NegateCondition(cc_reg_);
+    // LoadCondition may (and usually does) leave a test and branch to
+    // be emitted by the caller.  In that case, negate the condition.
+    if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);
 
   } else if (op == Token::DELETE) {
     Property* property = node->expression()->AsProperty();
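
For reference, the frame walk that the new GenerateIsConstructCall emits can be
modeled in plain C++. This is an illustrative sketch, not part of the patch:
the Frame struct, the offsets it implies, and the constant values are
hypothetical stand-ins for V8's frame constants; only the control flow mirrors
the generated code.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical model of a stack frame: each frame records its caller's
    // frame pointer plus a context slot (which holds a sentinel for arguments
    // adaptor frames) and a marker slot.
    struct Frame {
      Frame* caller_fp;   // cf. StandardFrameConstants::kCallerFPOffset
      intptr_t context;   // cf. StandardFrameConstants::kContextOffset
      intptr_t marker;    // cf. StandardFrameConstants::kMarkerOffset
    };

    const intptr_t kAdaptorSentinel = -1;  // stand-in for ArgumentsAdaptorFrame::SENTINEL
    const intptr_t kConstructMarker = 2;   // stand-in for Smi::FromInt(StackFrame::CONSTRUCT)

    // Mirrors the generated code: look at the calling frame, skip one
    // arguments adaptor frame if present, then test the frame marker.
    bool IsConstructCall(const Frame* fp) {
      const Frame* caller = fp->caller_fp;          // ldr r2, [fp, #kCallerFPOffset]
      if (caller->context == kAdaptorSentinel) {    // cmp against the adaptor sentinel
        caller = caller->caller_fp;                 // skip the adaptor frame
      }
      return caller->marker == kConstructMarker;    // cmp against the CONSTRUCT marker
    }

    int main() {
      Frame construct_frame = {nullptr, 0, kConstructMarker};
      Frame current = {&construct_frame, 0, 0};
      printf("%d\n", IsConstructCall(&current));  // prints 1
      return 0;
    }
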
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index c55a958..d193ab9 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -506,17 +506,25 @@
         // multiply instructions
         if (instr->Bit(23) == 0) {
           if (instr->Bit(21) == 0) {
-            // Mul calls it Rd.  Everyone else calls it Rn.
+            // The MUL instruction description (A 4.1.33) refers to Rd as being
+            // the destination for the operation, but it confusingly uses the
+            // Rn field to encode it.
             Format(instr, "mul'cond's 'rn, 'rm, 'rs");
           } else {
-            // In the manual the order is rd, rm, rs, rn.  But mla swaps the
-            // positions of rn and rd in the encoding.
+            // The MLA instruction description (A 4.1.28) refers to the order
+            // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
+            // Rn field to encode the Rd register and the Rd field to encode
+            // the Rn register.
             Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd");
           }
         } else {
-          // In the manual the order is RdHi, RdLo, Rm, Rs.
-          // RdHi is what other instructions call Rn and RdLo is Rd.
-          Format(instr, "'um'al'cond's 'rn, 'rd, 'rm, 'rs");
+          // The signed/long multiply instructions use the terms RdHi and RdLo
+          // when referring to the target registers. They are mapped to the Rn
+          // and Rd fields as follows:
+          // RdLo == Rd field
+          // RdHi == Rn field
+          // The order of registers is: <RdLo>, <RdHi>, <Rm>, <Rs>
+          Format(instr, "'um'al'cond's 'rd, 'rn, 'rm, 'rs");
         }
       } else {
         Unknown(instr);  // not used by V8
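
To make the field mapping concrete, here is a standalone sketch (not V8 code)
that extracts the Rn and Rd field positions the comments above refer to, bits
19..16 and 15..12, mirroring what the RnField()/RdField() accessors return. It
decodes one mul and one umull encoding to show that the destination registers
really live in those fields.

    #include <cstdint>
    #include <cstdio>

    // Field accessors named after the generic data-processing layout:
    // bits 19..16 are the "Rn field", bits 15..12 the "Rd field".
    inline int RnField(uint32_t instr) { return (instr >> 16) & 0xf; }
    inline int RdField(uint32_t instr) { return (instr >> 12) & 0xf; }
    inline int RsField(uint32_t instr) { return (instr >> 8) & 0xf; }
    inline int RmField(uint32_t instr) { return instr & 0xf; }

    int main() {
      // mul r3, r1, r2 == 0xe0030291: the destination r3 sits in the Rn field.
      uint32_t mul = 0xe0030291;
      printf("mul:   Rd=r%d Rm=r%d Rs=r%d\n",
             RnField(mul), RmField(mul), RsField(mul));

      // umull r4, r5, r1, r2 == 0xe0854291: RdLo (r4) is in the Rd field,
      // RdHi (r5) is in the Rn field.
      uint32_t umull = 0xe0854291;
      printf("umull: RdLo=r%d RdHi=r%d Rm=r%d Rs=r%d\n",
             RdField(umull), RnField(umull), RmField(umull), RsField(umull));
      return 0;
    }
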
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 3d6b8cb..47e2749 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -290,11 +290,24 @@
   // Align the stack at this point.  After this point we have 5 pushes,
   // so in fact we have to unalign here!  See also the assert on the
   // alignment immediately below.
-  if (OS::ActivationFrameAlignment() != kPointerSize) {
+#if defined(V8_HOST_ARCH_ARM)
+  // Running on the real platform. Use the alignment as mandated by the local
+  // environment.
+  // Note: This will break if we ever start generating snapshots on one ARM
+  // platform for another ARM platform with a different alignment.
+  int activation_frame_alignment = OS::ActivationFrameAlignment();
+#else  // defined(V8_HOST_ARCH_ARM)
+  // If we are using the simulator then we should always align to the expected
+  // alignment. As the simulator is used to generate snapshots, we do not
+  // know whether the target platform will need alignment, so we always
+  // align at this point.
+  int activation_frame_alignment = 2 * kPointerSize;
+#endif  // defined(V8_HOST_ARCH_ARM)
+  if (activation_frame_alignment != kPointerSize) {
     // This code needs to be made more general if this assert doesn't hold.
-    ASSERT(OS::ActivationFrameAlignment() == 2 * kPointerSize);
+    ASSERT(activation_frame_alignment == 2 * kPointerSize);
     mov(r7, Operand(Smi::FromInt(0)));
-    tst(sp, Operand(OS::ActivationFrameAlignment() - 1));
+    tst(sp, Operand(activation_frame_alignment - 1));
     push(r7, eq);  // Conditional push instruction.
   }
 
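
The alignment dance can be restated in ordinary C++: push one extra filler
word exactly when the stack would otherwise end up misaligned after the pushes
that follow. The function below is an illustrative sketch under the same
restriction as the ASSERT above (alignment is either one or two pointer
sizes); it is not V8 code.

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    const int kPointerSize = 4;  // ARM word size assumed by the code above

    // Decide whether one filler word must be pushed so that the stack pointer
    // is aligned again after `pending_pushes` further pushes.
    bool NeedsFillerPush(uintptr_t sp, int alignment, int pending_pushes) {
      if (alignment == kPointerSize) return false;  // no extra alignment needed
      assert(alignment == 2 * kPointerSize);        // same restriction as above
      uintptr_t sp_after = sp - pending_pushes * kPointerSize;
      return (sp_after & (alignment - 1)) != 0;     // filler iff we would end up misaligned
    }

    int main() {
      // With the five pending pushes mentioned in the comment above, an
      // 8-byte aligned sp needs the filler (it must be "unaligned" first) ...
      printf("aligned sp:   filler=%d\n", NeedsFillerPush(0x7fff0000u, 8, 5));
      // ... while an sp that is already off by one word does not.
      printf("unaligned sp: filler=%d\n", NeedsFillerPush(0x7fff0004u, 8, 5));
      return 0;
    }
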
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 1b42919..e5500aa 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -1080,25 +1080,44 @@
     // multiply instruction or extra loads and stores
     if (instr->Bits(7, 4) == 9) {
       if (instr->Bit(24) == 0) {
-        // Multiply instructions have Rd in a funny place.
-        int rd = instr->RnField();
+        // Raw field decoding here. Multiply instructions have their Rd in
+        // funny places.
+        int rn = instr->RnField();
         int rm = instr->RmField();
         int rs = instr->RsField();
         int32_t rs_val = get_register(rs);
         int32_t rm_val = get_register(rm);
         if (instr->Bit(23) == 0) {
           if (instr->Bit(21) == 0) {
+            // The MUL instruction description (A 4.1.33) refers to Rd as being
+            // the destination for the operation, but it confusingly uses the
+            // Rn field to encode it.
             // Format(instr, "mul'cond's 'rn, 'rm, 'rs");
+            int rd = rn;  // Remap the rn field to the Rd register.
             int32_t alu_out = rm_val * rs_val;
             set_register(rd, alu_out);
             if (instr->HasS()) {
               SetNZFlags(alu_out);
             }
           } else {
-            UNIMPLEMENTED();  // mla is not used by V8.
+            // The MLA instruction description (A 4.1.28) refers to the order
+            // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the
+            // Rn field to encode the Rd register and the Rd field to encode
+            // the Rn register.
+            Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd");
           }
         } else {
-          // Format(instr, "'um'al'cond's 'rn, 'rd, 'rs, 'rm");
+          // The signed/long multiply instructions use the terms RdHi and RdLo
+          // when referring to the target registers. They are mapped to the Rn
+          // and Rd fields as follows:
+          // RdLo == Rd field
+          // RdHi == Rn field (remapped to the variable rd_hi below, just as
+          //                   the mul instruction above uses the Rn field to
+          //                   encode its Rd register. Good luck figuring this
+          //                   out without reading the ARM instruction manual
+          //                   at a very detailed level.)
+          // Format(instr, "'um'al'cond's 'rd, 'rn, 'rs, 'rm");
+          int rd_hi = rn;  // Remap the rn field to the RdHi register.
           int rd_lo = instr->RdField();
           int32_t hi_res = 0;
           int32_t lo_res = 0;
@@ -1117,7 +1136,7 @@
             lo_res = static_cast<int32_t>(result & 0xffffffff);
           }
           set_register(rd_lo, lo_res);
-          set_register(rd, hi_res);
+          set_register(rd_hi, hi_res);
           if (instr->HasS()) {
             UNIMPLEMENTED();
           }
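
The long-multiply case above boils down to computing a 64-bit product and
splitting it across the RdHi/RdLo pair. A standalone sketch of that split (the
function is illustrative, not simulator code):

    #include <cstdint>
    #include <cstdio>

    // Compute rm * rs as a 64-bit product, signed (smull) or unsigned (umull),
    // and split it into the two 32-bit destination registers.
    void LongMultiply(int32_t rm_val, int32_t rs_val, bool is_signed,
                      int32_t* hi_res, int32_t* lo_res) {
      uint64_t result;
      if (is_signed) {
        // smull: sign-extend both operands before multiplying.
        result = static_cast<uint64_t>(static_cast<int64_t>(rm_val) *
                                       static_cast<int64_t>(rs_val));
      } else {
        // umull: treat both operands as unsigned 32-bit values.
        result = static_cast<uint64_t>(static_cast<uint32_t>(rm_val)) *
                 static_cast<uint32_t>(rs_val);
      }
      *lo_res = static_cast<int32_t>(result & 0xffffffff);  // -> RdLo (Rd field)
      *hi_res = static_cast<int32_t>(result >> 32);         // -> RdHi (Rn field)
    }

    int main() {
      int32_t hi, lo;
      LongMultiply(-2, 3, true, &hi, &lo);   // smull: -6 -> hi = -1, lo = -6
      printf("smull: hi=%d lo=%d\n", hi, lo);
      LongMultiply(-2, 3, false, &hi, &lo);  // umull: 0xfffffffe * 3 = 0x2fffffffa
      printf("umull: hi=%d lo=%d\n", hi, lo);
      return 0;
    }
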
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 8ef4956..c64b92a 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -134,7 +134,7 @@
 }
 
 
-// Pending fixups are code positions that have refer to builtin code
+// Pending fixups are code positions that refer to builtin code
 // objects that were not available at the time the code was generated.
 // The pending list is processed whenever an environment has been
 // created.
@@ -216,7 +216,6 @@
         *reinterpret_cast<Object**>(pc) = f->code();
       }
     } else {
-      ASSERT(is_pc_relative);
       Assembler::set_target_address_at(pc, f->code()->instruction_start());
     }
 
@@ -1556,7 +1555,7 @@
   // will always do unlinking.
   previous_ = current_;
   current_  = this;
-  result_ = NULL;
+  result_ = Handle<Context>::null();
 
   // If V8 isn't running and cannot be initialized, just return.
   if (!V8::IsRunning() && !V8::Initialize(NULL)) return;
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 37bc707..9c24c60 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -85,7 +85,7 @@
             Handle<NumberDictionary>(Heap::code_stubs()),
             key,
             code);
-    Heap::set_code_stubs(*dict);
+    Heap::public_set_code_stubs(*dict);
     index = Heap::code_stubs()->FindEntry(key);
   }
   ASSERT(index != NumberDictionary::kNotFound);
diff --git a/src/compilation-cache.cc b/src/compilation-cache.cc
index fd706af..0951af1 100644
--- a/src/compilation-cache.cc
+++ b/src/compilation-cache.cc
@@ -281,6 +281,7 @@
   HandleScope scope;
   ASSERT(boilerplate->IsBoilerplate());
   Handle<CompilationCacheTable> table = GetTable(0);
+  // TODO(X64): -fstrict-aliasing causes a problem with table.  Fix it.
   CALL_HEAP_FUNCTION_VOID(table->Put(*source, *boilerplate));
 }
 
diff --git a/src/date-delay.js b/src/date-delay.js
index 3414cb9..6adde46 100644
--- a/src/date-delay.js
+++ b/src/date-delay.js
@@ -427,6 +427,19 @@
 }
 
 
+// The Date cache is used to limit the cost of parsing the same Date
+// strings over and over again.
+var Date_cache = {
+  // Cached time value.
+  time: $NaN,
+  // Cached year when interpreting the time as a local time. Only
+  // valid when the time matches cached time.
+  year: $NaN,
+  // String input for which the cached time is valid.
+  string: null
+};
+
+
 %SetCode($Date, function(year, month, date, hours, minutes, seconds, ms) {
   if (!%_IsConstructCall()) {
     // ECMA 262 - 15.9.2
@@ -442,6 +455,20 @@
   } else if (argc == 1) {
     if (IS_NUMBER(year)) {
       value = TimeClip(year);
+
+    } else if (IS_STRING(year)) {
+      // Probe the Date cache. If we already have a time value for the
+      // given string, we reuse it instead of parsing the string again.
+      var cache = Date_cache;
+      if (cache.string === year) {
+        value = cache.time;
+      } else {
+        value = DateParse(year);
+        cache.time = value;
+        cache.year = YearFromTime(LocalTimeNoCheck(value));
+        cache.string = year;
+      }
+
     } else {
       // According to ECMA 262, no hint should be given for this
       // conversion. However, ToPrimitive defaults to STRING_HINT for
@@ -537,8 +564,9 @@
 function GetFullYearFrom(aDate) {
   var t = DATE_VALUE(aDate);
   if (NUMBER_IS_NAN(t)) return t;
-  // Ignore the DST offset for year computations.
-  return YearFromTime(t + local_time_offset);
+  var cache = Date_cache;
+  if (cache.time === t) return cache.year;
+  return YearFromTime(LocalTimeNoCheck(t));
 }
 
 
@@ -634,7 +662,7 @@
 
 // -------------------------------------------------------------------
 
-// Reused output buffer.
+// Reused output buffer. Used when parsing date strings.
 var parse_buffer = $Array(7);
 
 // ECMA 262 - 15.9.4.2
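
The Date cache added above is a single-entry memo keyed on the input string,
plus a cached year keyed on the cached time value. The same idea, sketched in
C++ for illustration; ParseDate and YearFromTime are hypothetical stand-ins
for the real parsing code:

    #include <cmath>
    #include <cstdio>
    #include <string>

    // Hypothetical stand-ins so the sketch is self-contained; the real code
    // parses the string and computes the local-time year.
    double ParseDate(const std::string& s) { return 1000.0 * s.size(); }
    int YearFromTime(double time_ms) {
      return 1970 + static_cast<int>(time_ms / 31536000000.0);
    }

    struct DateCache {
      double time = NAN;        // cached time value
      int year = 0;             // year for `time`; only valid while time matches
      std::string string;       // input string for which `time` is valid
      bool has_string = false;
    };

    static DateCache cache;

    double TimeFromString(const std::string& s) {
      if (cache.has_string && cache.string == s) return cache.time;  // cache hit
      double value = ParseDate(s);
      cache.time = value;
      cache.year = YearFromTime(value);
      cache.string = s;
      cache.has_string = true;
      return value;
    }

    int FullYearFromTime(double t) {
      // Reuse the cached year when asked about the cached time value.
      if (!std::isnan(cache.time) && cache.time == t) return cache.year;
      return YearFromTime(t);
    }

    int main() {
      double t1 = TimeFromString("Sat Jun 27 2009");  // parsed
      double t2 = TimeFromString("Sat Jun 27 2009");  // served from the cache
      printf("%d %d\n", t1 == t2, FullYearFromTime(t1));
      return 0;
    }
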
diff --git a/src/debug-delay.js b/src/debug-delay.js
index 857c554..4f60851 100644
--- a/src/debug-delay.js
+++ b/src/debug-delay.js
@@ -223,7 +223,8 @@
 // Object representing a script break point. The script is referenced by its
 // script name or script id and the break point is represented as line and
 // column.
-function ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column) {
+function ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column,
+                          opt_groupId) {
   this.type_ = type;
   if (type == Debug.ScriptBreakPointType.ScriptId) {
     this.script_id_ = script_id_or_name;
@@ -232,6 +233,7 @@
   }
   this.line_ = opt_line || 0;
   this.column_ = opt_column;
+  this.groupId_ = opt_groupId;
   this.hit_count_ = 0;
   this.active_ = true;
   this.condition_ = null;
@@ -244,6 +246,11 @@
 };
 
 
+ScriptBreakPoint.prototype.groupId = function() {
+  return this.groupId_;
+};
+
+
 ScriptBreakPoint.prototype.type = function() {
   return this.type_;
 };
@@ -611,10 +618,12 @@
 // Sets a breakpoint in a script identified through id or name at the
 // specified source line and column within that line.
 Debug.setScriptBreakPoint = function(type, script_id_or_name,
-                                     opt_line, opt_column, opt_condition) {
+                                     opt_line, opt_column, opt_condition,
+                                     opt_groupId) {
   // Create script break point object.
   var script_break_point =
-      new ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column);
+      new ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column,
+                           opt_groupId);
 
   // Assign number to the new script break point and add it.
   script_break_point.number_ = next_break_point_number++;
@@ -636,19 +645,19 @@
 
 Debug.setScriptBreakPointById = function(script_id,
                                          opt_line, opt_column,
-                                         opt_condition) {
+                                         opt_condition, opt_groupId) {
   return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptId,
                                   script_id, opt_line, opt_column,
-                                  opt_condition)
+                                  opt_condition, opt_groupId);
 }
 
 
 Debug.setScriptBreakPointByName = function(script_name,
                                            opt_line, opt_column,
-                                           opt_condition) {
+                                           opt_condition, opt_groupId) {
   return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptName,
                                   script_name, opt_line, opt_column,
-                                  opt_condition)
+                                  opt_condition, opt_groupId);
 }
 
 
@@ -1210,6 +1219,8 @@
         this.changeBreakPointRequest_(request, response);
       } else if (request.command == 'clearbreakpoint') {
         this.clearBreakPointRequest_(request, response);
+      } else if (request.command == 'clearbreakpointgroup') {
+        this.clearBreakPointGroupRequest_(request, response);
       } else if (request.command == 'backtrace') {
         this.backtraceRequest_(request, response);
       } else if (request.command == 'frame') {
@@ -1325,6 +1336,7 @@
       true : request.arguments.enabled;
   var condition = request.arguments.condition;
   var ignoreCount = request.arguments.ignoreCount;
+  var groupId = request.arguments.groupId;
 
   // Check for legal arguments.
   if (!type || IS_UNDEFINED(target)) {
@@ -1378,10 +1390,11 @@
   } else if (type == 'script') {
     // set script break point.
     break_point_number =
-        Debug.setScriptBreakPointByName(target, line, column, condition);
+        Debug.setScriptBreakPointByName(target, line, column, condition,
+                                        groupId);
   } else {  // type == 'scriptId'.
     break_point_number =
-        Debug.setScriptBreakPointById(target, line, column, condition);
+        Debug.setScriptBreakPointById(target, line, column, condition, groupId);
   }
 
   // Set additional break point properties.
@@ -1454,6 +1467,40 @@
 }
 
 
+DebugCommandProcessor.prototype.clearBreakPointGroupRequest_ = function(request, response) {
+  // Check for legal request.
+  if (!request.arguments) {
+    response.failed('Missing arguments');
+    return;
+  }
+
+  // Pull out arguments.
+  var group_id = request.arguments.groupId;
+
+  // Check for legal arguments.
+  if (!group_id) {
+    response.failed('Missing argument "groupId"');
+    return;
+  }
+
+  var cleared_break_points = [];
+  var new_script_break_points = [];
+  for (var i = 0; i < script_break_points.length; i++) {
+    var next_break_point = script_break_points[i];
+    if (next_break_point.groupId() == group_id) {
+      cleared_break_points.push(next_break_point.number());
+      next_break_point.clear();
+    } else {
+      new_script_break_points.push(next_break_point);
+    }
+  }
+  script_break_points = new_script_break_points;
+
+  // Add the cleared break point numbers to the response.
+  response.body = { breakpoints: cleared_break_points };
+}
+
+
 DebugCommandProcessor.prototype.clearBreakPointRequest_ = function(request, response) {
   // Check for legal request.
   if (!request.arguments) {
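
The new clearbreakpointgroup handler is essentially a partition of the script
break point list: clear and report the break points whose group id matches,
keep the rest. A minimal sketch of that logic (BreakPoint and the function
name are hypothetical, not debugger API):

    #include <cstdio>
    #include <string>
    #include <vector>

    // Hypothetical break point record; the real objects also carry location,
    // condition, ignore count, and so on.
    struct BreakPoint {
      int number;
      std::string group_id;  // empty when the break point belongs to no group
    };

    // Returns the numbers of the cleared break points and shrinks the list to
    // the ones that were kept, mirroring clearBreakPointGroupRequest_.
    std::vector<int> ClearBreakPointGroup(std::vector<BreakPoint>* break_points,
                                          const std::string& group_id) {
      std::vector<int> cleared;
      std::vector<BreakPoint> remaining;
      for (const BreakPoint& bp : *break_points) {
        if (bp.group_id == group_id) {
          cleared.push_back(bp.number);  // the debugger would also clear it here
        } else {
          remaining.push_back(bp);
        }
      }
      *break_points = remaining;
      return cleared;
    }

    int main() {
      std::vector<BreakPoint> bps = {{1, "ui"}, {2, ""}, {3, "ui"}};
      std::vector<int> cleared = ClearBreakPointGroup(&bps, "ui");
      printf("cleared=%zu remaining=%zu\n", cleared.size(), bps.size());
      return 0;
    }
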
diff --git a/src/factory.cc b/src/factory.cc
index 4d7a957..572180d 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -570,12 +570,14 @@
   int descriptor_count = 0;
 
   // Copy the descriptors from the array.
-  DescriptorWriter w(*result);
-  for (DescriptorReader r(*array); !r.eos(); r.advance()) {
-    if (!r.IsNullDescriptor()) {
-      w.WriteFrom(&r);
+  {
+    DescriptorWriter w(*result);
+    for (DescriptorReader r(*array); !r.eos(); r.advance()) {
+      if (!r.IsNullDescriptor()) {
+        w.WriteFrom(&r);
+      }
+      descriptor_count++;
     }
-    descriptor_count++;
   }
 
   // Number of duplicates detected.
@@ -594,7 +596,10 @@
     if (result->LinearSearch(*key, descriptor_count) ==
         DescriptorArray::kNotFound) {
       CallbacksDescriptor desc(*key, *entry, entry->property_attributes());
-      w.Write(&desc);
+      // We do not use a DescriptorWriter because SymbolFromString can
+      // allocate. A DescriptorWriter holds a raw pointer and is
+      // therefore not GC safe.
+      result->Set(descriptor_count, &desc);
       descriptor_count++;
     } else {
       duplicates++;
diff --git a/src/factory.h b/src/factory.h
index 90fb29c..0afdd76 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -28,6 +28,7 @@
 #ifndef V8_FACTORY_H_
 #define V8_FACTORY_H_
 
+#include "globals.h"
 #include "heap.h"
 #include "zone-inl.h"
 
@@ -299,13 +300,19 @@
                                 Handle<JSObject> instance,
                                 bool* pending_exception);
 
-#define ROOT_ACCESSOR(type, name) \
-  static Handle<type> name() { return Handle<type>(&Heap::name##_); }
+#define ROOT_ACCESSOR(type, name, camel_name)                                  \
+  static inline Handle<type> name() {                                          \
+    return Handle<type>(bit_cast<type**, Object**>(                            \
+        &Heap::roots_[Heap::k##camel_name##RootIndex]));                       \
+  }
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
 #define SYMBOL_ACCESSOR(name, str) \
-  static Handle<String> name() { return Handle<String>(&Heap::name##_); }
+  static inline Handle<String> name() {                                        \
+    return Handle<String>(bit_cast<String**, Object**>(                        \
+        &Heap::roots_[Heap::k##name##RootIndex]));                             \
+  }
   SYMBOL_LIST(SYMBOL_ACCESSOR)
 #undef SYMBOL_ACCESSOR
 
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 814b2c4..9c696ed 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -208,8 +208,6 @@
 
 // Regexp
 DEFINE_bool(trace_regexps, false, "trace regexp execution")
-DEFINE_bool(regexp_native, true,
-            "use native code regexp implementation (IA32 only)")
 DEFINE_bool(regexp_optimization, true, "generate optimized regexp code")
 
 // Testing flags test/cctest/test-{flags,api,serialization}.cc
diff --git a/src/handles.cc b/src/handles.cc
index 44ca602..afed6e9 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -372,10 +372,10 @@
 
 
 Handle<JSValue> GetScriptWrapper(Handle<Script> script) {
-  Handle<Object> cache(reinterpret_cast<Object**>(script->wrapper()->proxy()));
-  if (!cache.is_null()) {
+  if (script->wrapper()->proxy() != NULL) {
     // Return the script wrapper directly from the cache.
-    return Handle<JSValue>(JSValue::cast(*cache));
+    return Handle<JSValue>(
+        reinterpret_cast<JSValue**>(script->wrapper()->proxy()));
   }
 
   // Construct a new script wrapper.
diff --git a/src/handles.h b/src/handles.h
index af638b8..a86dc96 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -42,7 +42,7 @@
 template<class T>
 class Handle {
  public:
-  INLINE(Handle(T** location))  { location_ = location; }
+  INLINE(Handle(T** location)) { location_ = location; }
   INLINE(explicit Handle(T* obj));
 
   INLINE(Handle()) : location_(NULL) {}
@@ -59,7 +59,7 @@
     location_ = reinterpret_cast<T**>(handle.location());
   }
 
-  INLINE(T* operator ->() const)  { return operator*(); }
+  INLINE(T* operator ->() const) { return operator*(); }
 
   // Check if this handle refers to the exact same object as the other handle.
   bool is_identical_to(const Handle<T> other) const {
diff --git a/src/heap-inl.h b/src/heap-inl.h
index 810d3d4..3b6efed 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -216,7 +216,7 @@
 
 
 void Heap::SetLastScriptId(Object* last_script_id) {
-  last_script_id_ = last_script_id;
+  roots_[kLastScriptIdRootIndex] = last_script_id;
 }
 
 
diff --git a/src/heap.cc b/src/heap.cc
index 749013a..3706159 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -43,21 +43,10 @@
 namespace v8 {
 namespace internal {
 
-#define ROOT_ALLOCATION(type, name) type* Heap::name##_;
-  ROOT_LIST(ROOT_ALLOCATION)
-#undef ROOT_ALLOCATION
-
-
-#define STRUCT_ALLOCATION(NAME, Name, name) Map* Heap::name##_map_;
-  STRUCT_LIST(STRUCT_ALLOCATION)
-#undef STRUCT_ALLOCATION
-
-
-#define SYMBOL_ALLOCATION(name, string) String* Heap::name##_;
-  SYMBOL_LIST(SYMBOL_ALLOCATION)
-#undef SYMBOL_ALLOCATION
 
 String* Heap::hidden_symbol_;
+Object* Heap::roots_[Heap::kRootListLength];
+
 
 NewSpace Heap::new_space_;
 OldSpace* Heap::old_pointer_space_ = NULL;
@@ -284,9 +273,8 @@
 
   Counters::alive_after_last_gc.Set(SizeOfObjects());
 
-  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table_);
-  Counters::symbol_table_capacity.Set(symbol_table->Capacity());
-  Counters::number_of_symbols.Set(symbol_table->NumberOfElements());
+  Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
+  Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
   ReportStatisticsAfterGC();
 #endif
@@ -405,8 +393,7 @@
 static void VerifySymbolTable() {
 #ifdef DEBUG
   SymbolTableVerifier verifier;
-  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table());
-  symbol_table->IterateElements(&verifier);
+  Heap::symbol_table()->IterateElements(&verifier);
 #endif  // DEBUG
 }
 
@@ -1012,7 +999,7 @@
   if (result->IsFailure()) return result;
 
   // Map::cast cannot be used due to uninitialized map field.
-  reinterpret_cast<Map*>(result)->set_map(meta_map());
+  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
@@ -1041,41 +1028,66 @@
 }
 
 
+const Heap::StringTypeTable Heap::string_type_table[] = {
+#define STRING_TYPE_ELEMENT(type, size, name, camel_name)                      \
+  {type, size, k##camel_name##MapRootIndex},
+  STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
+#undef STRING_TYPE_ELEMENT
+};
+
+
+const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
+#define CONSTANT_SYMBOL_ELEMENT(name, contents)                                \
+  {contents, k##name##RootIndex},
+  SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT)
+#undef CONSTANT_SYMBOL_ELEMENT
+};
+
+
+const Heap::StructTable Heap::struct_table[] = {
+#define STRUCT_TABLE_ELEMENT(NAME, Name, name)                                 \
+  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
+  STRUCT_LIST(STRUCT_TABLE_ELEMENT)
+#undef STRUCT_TABLE_ELEMENT
+};
+
+
 bool Heap::CreateInitialMaps() {
   Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
   if (obj->IsFailure()) return false;
 
   // Map::cast cannot be used due to uninitialized map field.
-  meta_map_ = reinterpret_cast<Map*>(obj);
-  meta_map()->set_map(meta_map());
+  Map* new_meta_map = reinterpret_cast<Map*>(obj);
+  set_meta_map(new_meta_map);
+  new_meta_map->set_map(new_meta_map);
 
   obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
   if (obj->IsFailure()) return false;
-  fixed_array_map_ = Map::cast(obj);
+  set_fixed_array_map(Map::cast(obj));
 
   obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
   if (obj->IsFailure()) return false;
-  oddball_map_ = Map::cast(obj);
+  set_oddball_map(Map::cast(obj));
 
   obj = AllocatePartialMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
                            JSGlobalPropertyCell::kSize);
   if (obj->IsFailure()) return false;
-  global_property_cell_map_ = Map::cast(obj);
+  set_global_property_cell_map(Map::cast(obj));
 
   // Allocate the empty array
   obj = AllocateEmptyFixedArray();
   if (obj->IsFailure()) return false;
-  empty_fixed_array_ = FixedArray::cast(obj);
+  set_empty_fixed_array(FixedArray::cast(obj));
 
   obj = Allocate(oddball_map(), OLD_DATA_SPACE);
   if (obj->IsFailure()) return false;
-  null_value_ = obj;
+  set_null_value(obj);
 
   // Allocate the empty descriptor array.  AllocateMap can now be used.
   obj = AllocateEmptyFixedArray();
   if (obj->IsFailure()) return false;
   // There is a check against empty_descriptor_array() in cast().
-  empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj);
+  set_empty_descriptor_array(reinterpret_cast<DescriptorArray*>(obj));
 
   // Fix the instance_descriptors for the existing maps.
   meta_map()->set_instance_descriptors(empty_descriptor_array());
@@ -1105,95 +1117,95 @@
 
   obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
   if (obj->IsFailure()) return false;
-  heap_number_map_ = Map::cast(obj);
+  set_heap_number_map(Map::cast(obj));
 
   obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
   if (obj->IsFailure()) return false;
-  proxy_map_ = Map::cast(obj);
+  set_proxy_map(Map::cast(obj));
 
-#define ALLOCATE_STRING_MAP(type, size, name)   \
-    obj = AllocateMap(type, size);              \
-    if (obj->IsFailure()) return false;         \
-    name##_map_ = Map::cast(obj);
-  STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
-#undef ALLOCATE_STRING_MAP
+  for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
+    const StringTypeTable& entry = string_type_table[i];
+    obj = AllocateMap(entry.type, entry.size);
+    if (obj->IsFailure()) return false;
+    roots_[entry.index] = Map::cast(obj);
+  }
 
   obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_short_string_map_ = Map::cast(obj);
-  undetectable_short_string_map_->set_is_undetectable();
+  set_undetectable_short_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_medium_string_map_ = Map::cast(obj);
-  undetectable_medium_string_map_->set_is_undetectable();
+  set_undetectable_medium_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_long_string_map_ = Map::cast(obj);
-  undetectable_long_string_map_->set_is_undetectable();
+  set_undetectable_long_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_short_ascii_string_map_ = Map::cast(obj);
-  undetectable_short_ascii_string_map_->set_is_undetectable();
+  set_undetectable_short_ascii_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_medium_ascii_string_map_ = Map::cast(obj);
-  undetectable_medium_ascii_string_map_->set_is_undetectable();
+  set_undetectable_medium_ascii_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
   if (obj->IsFailure()) return false;
-  undetectable_long_ascii_string_map_ = Map::cast(obj);
-  undetectable_long_ascii_string_map_->set_is_undetectable();
+  set_undetectable_long_ascii_string_map(Map::cast(obj));
+  Map::cast(obj)->set_is_undetectable();
 
   obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize);
   if (obj->IsFailure()) return false;
-  byte_array_map_ = Map::cast(obj);
+  set_byte_array_map(Map::cast(obj));
 
   obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
   if (obj->IsFailure()) return false;
-  code_map_ = Map::cast(obj);
+  set_code_map(Map::cast(obj));
 
   obj = AllocateMap(FILLER_TYPE, kPointerSize);
   if (obj->IsFailure()) return false;
-  one_word_filler_map_ = Map::cast(obj);
+  set_one_word_filler_map(Map::cast(obj));
 
   obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
   if (obj->IsFailure()) return false;
-  two_word_filler_map_ = Map::cast(obj);
+  set_two_word_filler_map(Map::cast(obj));
 
-#define ALLOCATE_STRUCT_MAP(NAME, Name, name)      \
-  obj = AllocateMap(NAME##_TYPE, Name::kSize);     \
-  if (obj->IsFailure()) return false;              \
-  name##_map_ = Map::cast(obj);
-  STRUCT_LIST(ALLOCATE_STRUCT_MAP)
-#undef ALLOCATE_STRUCT_MAP
+  for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
+    const StructTable& entry = struct_table[i];
+    obj = AllocateMap(entry.type, entry.size);
+    if (obj->IsFailure()) return false;
+    roots_[entry.index] = Map::cast(obj);
+  }
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  hash_table_map_ = Map::cast(obj);
+  set_hash_table_map(Map::cast(obj));
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  context_map_ = Map::cast(obj);
+  set_context_map(Map::cast(obj));
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  catch_context_map_ = Map::cast(obj);
+  set_catch_context_map(Map::cast(obj));
 
   obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  global_context_map_ = Map::cast(obj);
+  set_global_context_map(Map::cast(obj));
 
   obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize);
   if (obj->IsFailure()) return false;
-  boilerplate_function_map_ = Map::cast(obj);
+  set_boilerplate_function_map(Map::cast(obj));
 
   obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize);
   if (obj->IsFailure()) return false;
-  shared_function_info_map_ = Map::cast(obj);
+  set_shared_function_info_map(Map::cast(obj));
 
   ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
   return true;
@@ -1254,15 +1266,15 @@
 
   obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
   if (obj->IsFailure()) return false;
-  neander_map_ = Map::cast(obj);
+  set_neander_map(Map::cast(obj));
 
-  obj = Heap::AllocateJSObjectFromMap(neander_map_);
+  obj = Heap::AllocateJSObjectFromMap(neander_map());
   if (obj->IsFailure()) return false;
   Object* elements = AllocateFixedArray(2);
   if (elements->IsFailure()) return false;
   FixedArray::cast(elements)->set(0, Smi::FromInt(0));
   JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
-  message_listeners_ = JSObject::cast(obj);
+  set_message_listeners(JSObject::cast(obj));
 
   return true;
 }
@@ -1270,25 +1282,25 @@
 
 void Heap::CreateCEntryStub() {
   CEntryStub stub;
-  c_entry_code_ = *stub.GetCode();
+  set_c_entry_code(*stub.GetCode());
 }
 
 
 void Heap::CreateCEntryDebugBreakStub() {
   CEntryDebugBreakStub stub;
-  c_entry_debug_break_code_ = *stub.GetCode();
+  set_c_entry_debug_break_code(*stub.GetCode());
 }
 
 
 void Heap::CreateJSEntryStub() {
   JSEntryStub stub;
-  js_entry_code_ = *stub.GetCode();
+  set_js_entry_code(*stub.GetCode());
 }
 
 
 void Heap::CreateJSConstructEntryStub() {
   JSConstructEntryStub stub;
-  js_construct_entry_code_ = *stub.GetCode();
+  set_js_construct_entry_code(*stub.GetCode());
 }
 
 
@@ -1319,34 +1331,35 @@
   // The -0 value must be set before NumberFromDouble works.
   obj = AllocateHeapNumber(-0.0, TENURED);
   if (obj->IsFailure()) return false;
-  minus_zero_value_ = obj;
-  ASSERT(signbit(minus_zero_value_->Number()) != 0);
+  set_minus_zero_value(obj);
+  ASSERT(signbit(minus_zero_value()->Number()) != 0);
 
   obj = AllocateHeapNumber(OS::nan_value(), TENURED);
   if (obj->IsFailure()) return false;
-  nan_value_ = obj;
+  set_nan_value(obj);
 
   obj = Allocate(oddball_map(), OLD_DATA_SPACE);
   if (obj->IsFailure()) return false;
-  undefined_value_ = obj;
+  set_undefined_value(obj);
   ASSERT(!InNewSpace(undefined_value()));
 
   // Allocate initial symbol table.
   obj = SymbolTable::Allocate(kInitialSymbolTableSize);
   if (obj->IsFailure()) return false;
-  symbol_table_ = obj;
+  // Don't use set_symbol_table() due to asserts.
+  roots_[kSymbolTableRootIndex] = obj;
 
   // Assign the print strings for oddballs after creating symboltable.
   Object* symbol = LookupAsciiSymbol("undefined");
   if (symbol->IsFailure()) return false;
-  Oddball::cast(undefined_value_)->set_to_string(String::cast(symbol));
-  Oddball::cast(undefined_value_)->set_to_number(nan_value_);
+  Oddball::cast(undefined_value())->set_to_string(String::cast(symbol));
+  Oddball::cast(undefined_value())->set_to_number(nan_value());
 
   // Assign the print strings for oddballs after creating symboltable.
   symbol = LookupAsciiSymbol("null");
   if (symbol->IsFailure()) return false;
-  Oddball::cast(null_value_)->set_to_string(String::cast(symbol));
-  Oddball::cast(null_value_)->set_to_number(Smi::FromInt(0));
+  Oddball::cast(null_value())->set_to_string(String::cast(symbol));
+  Oddball::cast(null_value())->set_to_number(Smi::FromInt(0));
 
   // Allocate the null_value
   obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
@@ -1354,32 +1367,31 @@
 
   obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1));
   if (obj->IsFailure()) return false;
-  true_value_ = obj;
+  set_true_value(obj);
 
   obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0));
   if (obj->IsFailure()) return false;
-  false_value_ = obj;
+  set_false_value(obj);
 
   obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1));
   if (obj->IsFailure()) return false;
-  the_hole_value_ = obj;
+  set_the_hole_value(obj);
 
   // Allocate the empty string.
   obj = AllocateRawAsciiString(0, TENURED);
   if (obj->IsFailure()) return false;
-  empty_string_ = String::cast(obj);
+  set_empty_string(String::cast(obj));
 
-#define SYMBOL_INITIALIZE(name, string)                 \
-  obj = LookupAsciiSymbol(string);                      \
-  if (obj->IsFailure()) return false;                   \
-  (name##_) = String::cast(obj);
-  SYMBOL_LIST(SYMBOL_INITIALIZE)
-#undef SYMBOL_INITIALIZE
+  for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
+    obj = LookupAsciiSymbol(constant_symbol_table[i].contents);
+    if (obj->IsFailure()) return false;
+    roots_[constant_symbol_table[i].index] = String::cast(obj);
+  }
 
   // Allocate the hidden symbol which is used to identify the hidden properties
   // in JSObjects. The hash code has a special value so that it will not match
   // the empty string when searching for the property. It cannot be part of the
-  // SYMBOL_LIST because it needs to be allocated manually with the special
+  // loop above because it needs to be allocated manually with the special
   // hash code in place. The hash code for the hidden_symbol is zero to ensure
   // that it will always be at the first entry in property descriptors.
   obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask);
@@ -1389,37 +1401,37 @@
   // Allocate the proxy for __proto__.
   obj = AllocateProxy((Address) &Accessors::ObjectPrototype);
   if (obj->IsFailure()) return false;
-  prototype_accessors_ = Proxy::cast(obj);
+  set_prototype_accessors(Proxy::cast(obj));
 
   // Allocate the code_stubs dictionary.
   obj = NumberDictionary::Allocate(4);
   if (obj->IsFailure()) return false;
-  code_stubs_ = NumberDictionary::cast(obj);
+  set_code_stubs(NumberDictionary::cast(obj));
 
   // Allocate the non_monomorphic_cache used in stub-cache.cc
   obj = NumberDictionary::Allocate(4);
   if (obj->IsFailure()) return false;
-  non_monomorphic_cache_ = NumberDictionary::cast(obj);
+  set_non_monomorphic_cache(NumberDictionary::cast(obj));
 
   CreateFixedStubs();
 
   // Allocate the number->string conversion cache
   obj = AllocateFixedArray(kNumberStringCacheSize * 2);
   if (obj->IsFailure()) return false;
-  number_string_cache_ = FixedArray::cast(obj);
+  set_number_string_cache(FixedArray::cast(obj));
 
   // Allocate cache for single character strings.
   obj = AllocateFixedArray(String::kMaxAsciiCharCode+1);
   if (obj->IsFailure()) return false;
-  single_character_string_cache_ = FixedArray::cast(obj);
+  set_single_character_string_cache(FixedArray::cast(obj));
 
   // Allocate cache for external strings pointing to native source code.
   obj = AllocateFixedArray(Natives::GetBuiltinsCount());
   if (obj->IsFailure()) return false;
-  natives_source_cache_ = FixedArray::cast(obj);
+  set_natives_source_cache(FixedArray::cast(obj));
 
   // Handling of script id generation is in Factory::NewScript.
-  last_script_id_ = undefined_value();
+  set_last_script_id(undefined_value());
 
   // Initialize keyed lookup cache.
   KeyedLookupCache::Clear();
@@ -1457,13 +1469,13 @@
   } else {
     hash = double_get_hash(number->Number());
   }
-  Object* key = number_string_cache_->get(hash * 2);
+  Object* key = number_string_cache()->get(hash * 2);
   if (key == number) {
-    return String::cast(number_string_cache_->get(hash * 2 + 1));
+    return String::cast(number_string_cache()->get(hash * 2 + 1));
   } else if (key->IsHeapNumber() &&
              number->IsHeapNumber() &&
              key->Number() == number->Number()) {
-    return String::cast(number_string_cache_->get(hash * 2 + 1));
+    return String::cast(number_string_cache()->get(hash * 2 + 1));
   }
   return undefined_value();
 }
@@ -1473,12 +1485,12 @@
   int hash;
   if (number->IsSmi()) {
     hash = smi_get_hash(Smi::cast(number));
-    number_string_cache_->set(hash * 2, number, SKIP_WRITE_BARRIER);
+    number_string_cache()->set(hash * 2, number, SKIP_WRITE_BARRIER);
   } else {
     hash = double_get_hash(number->Number());
-    number_string_cache_->set(hash * 2, number);
+    number_string_cache()->set(hash * 2, number);
   }
-  number_string_cache_->set(hash * 2 + 1, string);
+  number_string_cache()->set(hash * 2 + 1, string);
 }
 
 
@@ -1491,19 +1503,19 @@
   static const DoubleRepresentation plus_zero(0.0);
   static const DoubleRepresentation minus_zero(-0.0);
   static const DoubleRepresentation nan(OS::nan_value());
-  ASSERT(minus_zero_value_ != NULL);
+  ASSERT(minus_zero_value() != NULL);
   ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits));
 
   DoubleRepresentation rep(value);
   if (rep.bits == plus_zero.bits) return Smi::FromInt(0);  // not uncommon
   if (rep.bits == minus_zero.bits) {
     return new_object ? AllocateHeapNumber(-0.0, pretenure)
-                      : minus_zero_value_;
+                      : minus_zero_value();
   }
   if (rep.bits == nan.bits) {
     return new_object
         ? AllocateHeapNumber(OS::nan_value(), pretenure)
-        : nan_value_;
+        : nan_value();
   }
 
   // Try to represent the value as a tagged small integer.
@@ -2754,10 +2766,11 @@
 
 Object* Heap::LookupSymbol(Vector<const char> string) {
   Object* symbol = NULL;
-  Object* new_table =
-      SymbolTable::cast(symbol_table_)->LookupSymbol(string, &symbol);
+  Object* new_table = symbol_table()->LookupSymbol(string, &symbol);
   if (new_table->IsFailure()) return new_table;
-  symbol_table_ = new_table;
+  // Can't use set_symbol_table because SymbolTable::cast knows that
+  // SymbolTable is a singleton and checks for identity.
+  roots_[kSymbolTableRootIndex] = new_table;
   ASSERT(symbol != NULL);
   return symbol;
 }
@@ -2766,10 +2779,11 @@
 Object* Heap::LookupSymbol(String* string) {
   if (string->IsSymbol()) return string;
   Object* symbol = NULL;
-  Object* new_table =
-      SymbolTable::cast(symbol_table_)->LookupString(string, &symbol);
+  Object* new_table = symbol_table()->LookupString(string, &symbol);
   if (new_table->IsFailure()) return new_table;
-  symbol_table_ = new_table;
+  // Can't use set_symbol_table because SymbolTable::cast knows that
+  // SymbolTable is a singleton and checks for identity.
+  roots_[kSymbolTableRootIndex] = new_table;
   ASSERT(symbol != NULL);
   return symbol;
 }
@@ -2780,8 +2794,7 @@
     *symbol = string;
     return true;
   }
-  SymbolTable* table = SymbolTable::cast(symbol_table_);
-  return table->LookupSymbolIfExists(string, symbol);
+  return symbol_table()->LookupSymbolIfExists(string, symbol);
 }
 
 
@@ -2868,28 +2881,15 @@
 
 void Heap::IterateRoots(ObjectVisitor* v) {
   IterateStrongRoots(v);
-  v->VisitPointer(reinterpret_cast<Object**>(&symbol_table_));
+  v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
   SYNCHRONIZE_TAG("symbol_table");
 }
 
 
 void Heap::IterateStrongRoots(ObjectVisitor* v) {
-#define ROOT_ITERATE(type, name) \
-  v->VisitPointer(bit_cast<Object**, type**>(&name##_));
-  STRONG_ROOT_LIST(ROOT_ITERATE);
-#undef ROOT_ITERATE
+  v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
   SYNCHRONIZE_TAG("strong_root_list");
 
-#define STRUCT_MAP_ITERATE(NAME, Name, name) \
-  v->VisitPointer(bit_cast<Object**, Map**>(&name##_map_));
-  STRUCT_LIST(STRUCT_MAP_ITERATE);
-#undef STRUCT_MAP_ITERATE
-  SYNCHRONIZE_TAG("struct_map");
-
-#define SYMBOL_ITERATE(name, string) \
-  v->VisitPointer(bit_cast<Object**, String**>(&name##_));
-  SYMBOL_LIST(SYMBOL_ITERATE)
-#undef SYMBOL_ITERATE
   v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
   SYNCHRONIZE_TAG("symbol");
 
@@ -3366,8 +3366,8 @@
   // Lump all the string types together.
   int string_number = 0;
   int string_bytes = 0;
-#define INCREMENT_SIZE(type, size, name)   \
-    string_number += info[type].number();  \
+#define INCREMENT_SIZE(type, size, name, camel_name)   \
+    string_number += info[type].number();              \
     string_bytes += info[type].bytes();
   STRING_TYPE_LIST(INCREMENT_SIZE)
 #undef INCREMENT_SIZE
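
The structural change behind most of the heap changes above: the per-root
static members are replaced by a single roots_ array indexed by an enum
generated from the same list macro as the accessors, so the GC can visit every
strong root with one loop. Below is a small self-contained sketch of the
pattern, using a tiny hypothetical root list in place of V8's real
STRONG_ROOT_LIST:

    #include <cstdio>

    struct Object { const char* name; };
    typedef Object Map;      // stand-ins for the real heap object types
    typedef Object String;

    #define DEMO_ROOT_LIST(V)                    \
      V(Map, meta_map, MetaMap)                  \
      V(Map, fixed_array_map, FixedArrayMap)     \
      V(String, empty_string, EmptyString)

    class Heap {
     public:
      // The list macro generates one index constant per root ...
      enum RootListIndex {
    #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
        DEMO_ROOT_LIST(ROOT_INDEX_DECLARATION)
    #undef ROOT_INDEX_DECLARATION
        kRootListLength
      };

      // ... and one getter/setter pair per root, all reading through the
      // shared roots_ array (in the style of the ROOT_ACCESSOR macro).
    #define ROOT_ACCESSOR(type, name, camel_name)                     \
      static type* name() {                                           \
        return static_cast<type*>(roots_[k##camel_name##RootIndex]);  \
      }                                                               \
      static void set_##name(type* value) {                           \
        roots_[k##camel_name##RootIndex] = value;                     \
      }
      DEMO_ROOT_LIST(ROOT_ACCESSOR)
    #undef ROOT_ACCESSOR

      // The GC can now visit every root with a single loop over the array.
      static void IterateRoots(void (*visit)(Object**)) {
        for (int i = 0; i < kRootListLength; i++) visit(&roots_[i]);
      }

     private:
      static Object* roots_[kRootListLength];
    };

    Object* Heap::roots_[Heap::kRootListLength];

    int main() {
      static Map meta = {"meta_map"};
      Heap::set_meta_map(&meta);
      printf("%s\n", Heap::meta_map()->name);  // prints meta_map
      Heap::IterateRoots([](Object** slot) {
        printf("visiting root slot %p\n", static_cast<void*>(slot));
      });
      return 0;
    }
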
diff --git a/src/heap.h b/src/heap.h
index 9f61ce2..c88b5e6 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -34,105 +34,107 @@
 namespace internal {
 
 // Defines all the roots in Heap.
-#define STRONG_ROOT_LIST(V)                             \
-  V(Map, meta_map)                                      \
-  V(Map, heap_number_map)                               \
-  V(Map, short_string_map)                              \
-  V(Map, medium_string_map)                             \
-  V(Map, long_string_map)                               \
-  V(Map, short_ascii_string_map)                        \
-  V(Map, medium_ascii_string_map)                       \
-  V(Map, long_ascii_string_map)                         \
-  V(Map, short_symbol_map)                              \
-  V(Map, medium_symbol_map)                             \
-  V(Map, long_symbol_map)                               \
-  V(Map, short_ascii_symbol_map)                        \
-  V(Map, medium_ascii_symbol_map)                       \
-  V(Map, long_ascii_symbol_map)                         \
-  V(Map, short_cons_symbol_map)                         \
-  V(Map, medium_cons_symbol_map)                        \
-  V(Map, long_cons_symbol_map)                          \
-  V(Map, short_cons_ascii_symbol_map)                   \
-  V(Map, medium_cons_ascii_symbol_map)                  \
-  V(Map, long_cons_ascii_symbol_map)                    \
-  V(Map, short_sliced_symbol_map)                       \
-  V(Map, medium_sliced_symbol_map)                      \
-  V(Map, long_sliced_symbol_map)                        \
-  V(Map, short_sliced_ascii_symbol_map)                 \
-  V(Map, medium_sliced_ascii_symbol_map)                \
-  V(Map, long_sliced_ascii_symbol_map)                  \
-  V(Map, short_external_symbol_map)                     \
-  V(Map, medium_external_symbol_map)                    \
-  V(Map, long_external_symbol_map)                      \
-  V(Map, short_external_ascii_symbol_map)               \
-  V(Map, medium_external_ascii_symbol_map)              \
-  V(Map, long_external_ascii_symbol_map)                \
-  V(Map, short_cons_string_map)                         \
-  V(Map, medium_cons_string_map)                        \
-  V(Map, long_cons_string_map)                          \
-  V(Map, short_cons_ascii_string_map)                   \
-  V(Map, medium_cons_ascii_string_map)                  \
-  V(Map, long_cons_ascii_string_map)                    \
-  V(Map, short_sliced_string_map)                       \
-  V(Map, medium_sliced_string_map)                      \
-  V(Map, long_sliced_string_map)                        \
-  V(Map, short_sliced_ascii_string_map)                 \
-  V(Map, medium_sliced_ascii_string_map)                \
-  V(Map, long_sliced_ascii_string_map)                  \
-  V(Map, short_external_string_map)                     \
-  V(Map, medium_external_string_map)                    \
-  V(Map, long_external_string_map)                      \
-  V(Map, short_external_ascii_string_map)               \
-  V(Map, medium_external_ascii_string_map)              \
-  V(Map, long_external_ascii_string_map)                \
-  V(Map, undetectable_short_string_map)                 \
-  V(Map, undetectable_medium_string_map)                \
-  V(Map, undetectable_long_string_map)                  \
-  V(Map, undetectable_short_ascii_string_map)           \
-  V(Map, undetectable_medium_ascii_string_map)          \
-  V(Map, undetectable_long_ascii_string_map)            \
-  V(Map, byte_array_map)                                \
-  V(Map, fixed_array_map)                               \
-  V(Map, hash_table_map)                                \
-  V(Map, context_map)                                   \
-  V(Map, catch_context_map)                             \
-  V(Map, global_context_map)                            \
-  V(Map, code_map)                                      \
-  V(Map, oddball_map)                                   \
-  V(Map, global_property_cell_map)                      \
-  V(Map, boilerplate_function_map)                      \
-  V(Map, shared_function_info_map)                      \
-  V(Map, proxy_map)                                     \
-  V(Map, one_word_filler_map)                           \
-  V(Map, two_word_filler_map)                           \
-  V(Object, nan_value)                                  \
-  V(Object, undefined_value)                            \
-  V(Object, minus_zero_value)                           \
-  V(Object, null_value)                                 \
-  V(Object, true_value)                                 \
-  V(Object, false_value)                                \
-  V(String, empty_string)                               \
-  V(FixedArray, empty_fixed_array)                      \
-  V(DescriptorArray, empty_descriptor_array)            \
-  V(Object, the_hole_value)                             \
-  V(Map, neander_map)                                   \
-  V(JSObject, message_listeners)                        \
-  V(Proxy, prototype_accessors)                         \
-  V(NumberDictionary, code_stubs)                       \
-  V(NumberDictionary, non_monomorphic_cache)            \
-  V(Code, js_entry_code)                                \
-  V(Code, js_construct_entry_code)                      \
-  V(Code, c_entry_code)                                 \
-  V(Code, c_entry_debug_break_code)                     \
-  V(FixedArray, number_string_cache)                    \
-  V(FixedArray, single_character_string_cache)          \
-  V(FixedArray, natives_source_cache)                   \
-  V(Object, last_script_id)
+#define STRONG_ROOT_LIST(V)                                                    \
+  V(Map, meta_map, MetaMap)                                                    \
+  V(Map, heap_number_map, HeapNumberMap)                                       \
+  V(Map, short_string_map, ShortStringMap)                                     \
+  V(Map, medium_string_map, MediumStringMap)                                   \
+  V(Map, long_string_map, LongStringMap)                                       \
+  V(Map, short_ascii_string_map, ShortAsciiStringMap)                          \
+  V(Map, medium_ascii_string_map, MediumAsciiStringMap)                        \
+  V(Map, long_ascii_string_map, LongAsciiStringMap)                            \
+  V(Map, short_symbol_map, ShortSymbolMap)                                     \
+  V(Map, medium_symbol_map, MediumSymbolMap)                                   \
+  V(Map, long_symbol_map, LongSymbolMap)                                       \
+  V(Map, short_ascii_symbol_map, ShortAsciiSymbolMap)                          \
+  V(Map, medium_ascii_symbol_map, MediumAsciiSymbolMap)                        \
+  V(Map, long_ascii_symbol_map, LongAsciiSymbolMap)                            \
+  V(Map, short_cons_symbol_map, ShortConsSymbolMap)                            \
+  V(Map, medium_cons_symbol_map, MediumConsSymbolMap)                          \
+  V(Map, long_cons_symbol_map, LongConsSymbolMap)                              \
+  V(Map, short_cons_ascii_symbol_map, ShortConsAsciiSymbolMap)                 \
+  V(Map, medium_cons_ascii_symbol_map, MediumConsAsciiSymbolMap)               \
+  V(Map, long_cons_ascii_symbol_map, LongConsAsciiSymbolMap)                   \
+  V(Map, short_sliced_symbol_map, ShortSlicedSymbolMap)                        \
+  V(Map, medium_sliced_symbol_map, MediumSlicedSymbolMap)                      \
+  V(Map, long_sliced_symbol_map, LongSlicedSymbolMap)                          \
+  V(Map, short_sliced_ascii_symbol_map, ShortSlicedAsciiSymbolMap)             \
+  V(Map, medium_sliced_ascii_symbol_map, MediumSlicedAsciiSymbolMap)           \
+  V(Map, long_sliced_ascii_symbol_map, LongSlicedAsciiSymbolMap)               \
+  V(Map, short_external_symbol_map, ShortExternalSymbolMap)                    \
+  V(Map, medium_external_symbol_map, MediumExternalSymbolMap)                  \
+  V(Map, long_external_symbol_map, LongExternalSymbolMap)                      \
+  V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap)         \
+  V(Map, medium_external_ascii_symbol_map, MediumExternalAsciiSymbolMap)       \
+  V(Map, long_external_ascii_symbol_map, LongExternalAsciiSymbolMap)           \
+  V(Map, short_cons_string_map, ShortConsStringMap)                            \
+  V(Map, medium_cons_string_map, MediumConsStringMap)                          \
+  V(Map, long_cons_string_map, LongConsStringMap)                              \
+  V(Map, short_cons_ascii_string_map, ShortConsAsciiStringMap)                 \
+  V(Map, medium_cons_ascii_string_map, MediumConsAsciiStringMap)               \
+  V(Map, long_cons_ascii_string_map, LongConsAsciiStringMap)                   \
+  V(Map, short_sliced_string_map, ShortSlicedStringMap)                        \
+  V(Map, medium_sliced_string_map, MediumSlicedStringMap)                      \
+  V(Map, long_sliced_string_map, LongSlicedStringMap)                          \
+  V(Map, short_sliced_ascii_string_map, ShortSlicedAsciiStringMap)             \
+  V(Map, medium_sliced_ascii_string_map, MediumSlicedAsciiStringMap)           \
+  V(Map, long_sliced_ascii_string_map, LongSlicedAsciiStringMap)               \
+  V(Map, short_external_string_map, ShortExternalStringMap)                    \
+  V(Map, medium_external_string_map, MediumExternalStringMap)                  \
+  V(Map, long_external_string_map, LongExternalStringMap)                      \
+  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap)         \
+  V(Map, medium_external_ascii_string_map, MediumExternalAsciiStringMap)       \
+  V(Map, long_external_ascii_string_map, LongExternalAsciiStringMap)           \
+  V(Map, undetectable_short_string_map, UndetectableShortStringMap)            \
+  V(Map, undetectable_medium_string_map, UndetectableMediumStringMap)          \
+  V(Map, undetectable_long_string_map, UndetectableLongStringMap)              \
+  V(Map, undetectable_short_ascii_string_map, UndetectableShortAsciiStringMap) \
+  V(Map,                                                                       \
+    undetectable_medium_ascii_string_map,                                      \
+    UndetectableMediumAsciiStringMap)                                          \
+  V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap)   \
+  V(Map, byte_array_map, ByteArrayMap)                                         \
+  V(Map, fixed_array_map, FixedArrayMap)                                       \
+  V(Map, hash_table_map, HashTableMap)                                         \
+  V(Map, context_map, ContextMap)                                              \
+  V(Map, catch_context_map, CatchContextMap)                                   \
+  V(Map, global_context_map, GlobalContextMap)                                 \
+  V(Map, code_map, CodeMap)                                                    \
+  V(Map, oddball_map, OddballMap)                                              \
+  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
+  V(Map, boilerplate_function_map, BoilerplateFunctionMap)                     \
+  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
+  V(Map, proxy_map, ProxyMap)                                                  \
+  V(Map, one_word_filler_map, OneWordFillerMap)                                \
+  V(Map, two_word_filler_map, TwoWordFillerMap)                                \
+  V(Object, nan_value, NanValue)                                               \
+  V(Object, undefined_value, UndefinedValue)                                   \
+  V(Object, minus_zero_value, MinusZeroValue)                                  \
+  V(Object, null_value, NullValue)                                             \
+  V(Object, true_value, TrueValue)                                             \
+  V(Object, false_value, FalseValue)                                           \
+  V(String, empty_string, EmptyString)                                         \
+  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
+  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
+  V(Object, the_hole_value, TheHoleValue)                                      \
+  V(Map, neander_map, NeanderMap)                                              \
+  V(JSObject, message_listeners, MessageListeners)                             \
+  V(Proxy, prototype_accessors, PrototypeAccessors)                            \
+  V(NumberDictionary, code_stubs, CodeStubs)                                   \
+  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
+  V(Code, js_entry_code, JsEntryCode)                                          \
+  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
+  V(Code, c_entry_code, CEntryCode)                                            \
+  V(Code, c_entry_debug_break_code, CEntryDebugBreakCode)                      \
+  V(FixedArray, number_string_cache, NumberStringCache)                        \
+  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
+  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
+  V(Object, last_script_id, LastScriptId)
 
 
 #define ROOT_LIST(V)                                  \
   STRONG_ROOT_LIST(V)                                 \
-  V(Object, symbol_table)
+  V(SymbolTable, symbol_table, SymbolTable)
 
 #define SYMBOL_LIST(V)                                                   \
   V(Array_symbol, "Array")                                               \
@@ -636,18 +638,29 @@
     global_gc_epilogue_callback_ = callback;
   }
 
-  // Heap roots
-#define ROOT_ACCESSOR(type, name) static type* name() { return name##_; }
+  // Heap root getters.  We have versions with and without type::cast() here;
+  // type::cast() cannot be used during GC because its assert fails.
+#define ROOT_ACCESSOR(type, name, camel_name)                                  \
+  static inline type* name() {                                                 \
+    return type::cast(roots_[k##camel_name##RootIndex]);                       \
+  }                                                                            \
+  static inline type* raw_unchecked_##name() {                                 \
+    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]);          \
+  }
   ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
 // Utility type maps
-#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
-    static Map* name##_map() { return name##_map_; }
+#define STRUCT_MAP_ACCESSOR(NAME, Name, name)                                  \
+    static inline Map* name##_map() {                                          \
+      return Map::cast(roots_[k##Name##MapRootIndex]);                         \
+    }
   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
 #undef STRUCT_MAP_ACCESSOR
 
-#define SYMBOL_ACCESSOR(name, str) static String* name() { return name##_; }
+#define SYMBOL_ACCESSOR(name, str) static inline String* name() {              \
+    return String::cast(roots_[k##name##RootIndex]);                           \
+  }
   SYMBOL_LIST(SYMBOL_ACCESSOR)
 #undef SYMBOL_ACCESSOR
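
All three accessor macros above now read out of a single roots_ array indexed by generated enum values. As a rough, self-contained sketch of that X-macro pattern (simplified names, and a void* array standing in for V8's Object* roots; none of this is V8's actual code):

// Sketch only: one ROOT_LIST X-macro generates both an index enum and typed
// accessors over a flat roots_ array, so a GC could visit every root by
// walking the array instead of visiting named statics one by one.
#include <cassert>
#include <string>

#define MY_ROOT_LIST(V)                 \
  V(int, answer, Answer)                \
  V(std::string, greeting, Greeting)

class Roots {
 public:
  enum RootListIndex {
#define ROOT_INDEX_DECL(type, name, camel) k##camel##RootIndex,
    MY_ROOT_LIST(ROOT_INDEX_DECL)
#undef ROOT_INDEX_DECL
    kRootListLength
  };

  // One getter/setter pair per root, mirroring name() / set_##name() above.
#define ROOT_ACCESSOR(type, name, camel)                    \
  static type* name() {                                     \
    assert(roots_[k##camel##RootIndex] != nullptr);         \
    return static_cast<type*>(roots_[k##camel##RootIndex]); \
  }                                                         \
  static void set_##name(type* value) {                     \
    roots_[k##camel##RootIndex] = value;                    \
  }
  MY_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

 private:
  static void* roots_[kRootListLength];
};

void* Roots::roots_[Roots::kRootListLength] = {};

int main() {
  static int forty_two = 42;
  Roots::set_answer(&forty_two);
  return (*Roots::answer() == 42) ? 0 : 1;
}
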
 
@@ -692,11 +705,13 @@
   static inline AllocationSpace TargetSpaceId(InstanceType type);
 
   // Sets the code_stubs root (only used when expanding the dictionary).
-  static void set_code_stubs(NumberDictionary* value) { code_stubs_ = value; }
+  static void public_set_code_stubs(NumberDictionary* value) {
+    roots_[kCodeStubsRootIndex] = value;
+  }
 
   // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
-  static void set_non_monomorphic_cache(NumberDictionary* value) {
-    non_monomorphic_cache_ = value;
+  static void public_set_non_monomorphic_cache(NumberDictionary* value) {
+    roots_[kNonMonomorphicCacheRootIndex] = value;
   }
 
   // Update the next script id.
@@ -849,6 +864,13 @@
   static int mc_count_;  // how many mark-compact collections happened
   static int gc_count_;  // how many gc happened
 
+#define ROOT_ACCESSOR(type, name, camel_name)                                  \
+  static inline void set_##name(type* value) {                                 \
+    roots_[k##camel_name##RootIndex] = value;                                  \
+  }
+  ROOT_LIST(ROOT_ACCESSOR)
+#undef ROOT_ACCESSOR
+
 #ifdef DEBUG
   static bool allocation_allowed_;
 
@@ -883,20 +905,49 @@
   // last GC.
   static int old_gen_exhausted_;
 
-  // Declare all the roots
-#define ROOT_DECLARATION(type, name) static type* name##_;
-  ROOT_LIST(ROOT_DECLARATION)
-#undef ROOT_DECLARATION
+  // Declare all the root indices.
+  enum RootListIndex {
+#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
+    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
+#undef ROOT_INDEX_DECLARATION
 
 // Utility type maps
-#define DECLARE_STRUCT_MAP(NAME, Name, name) static Map* name##_map_;
+#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
   STRUCT_LIST(DECLARE_STRUCT_MAP)
 #undef DECLARE_STRUCT_MAP
 
-#define SYMBOL_DECLARATION(name, str) static String* name##_;
-  SYMBOL_LIST(SYMBOL_DECLARATION)
+#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
+    SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
-#undef SYMBOL_DECLARATION
+#undef SYMBOL_INDEX_DECLARATION
 
+    kSymbolTableRootIndex,
+    kStrongRootListLength = kSymbolTableRootIndex,
+    kRootListLength
+  };
+
+  static Object* roots_[kRootListLength];
+
+  struct StringTypeTable {
+    InstanceType type;
+    int size;
+    RootListIndex index;
+  };
+
+  struct ConstantSymbolTable {
+    const char* contents;
+    RootListIndex index;
+  };
+
+  struct StructTable {
+    InstanceType type;
+    int size;
+    RootListIndex index;
+  };
+
+  static const StringTypeTable string_type_table[];
+  static const ConstantSymbolTable constant_symbol_table[];
+  static const StructTable struct_table[];
+
   // The special hidden symbol which is an empty string, but does not match
   // any string when looked up in properties.
   static String* hidden_symbol_;
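
The new string_type_table, constant_symbol_table and struct_table members point at a table-driven setup style: initialization can loop over constant (type, size, root index) tuples instead of repeating one allocation call per map. A hypothetical illustration, with an invented AllocateMap stand-in:

// Illustrative only: loop over a constant table of (type, size, root index)
// entries, create one map per entry, and drop it into the roots_ array.
#include <cstdio>

enum RootListIndex { kFooMapRootIndex, kBarMapRootIndex, kRootListLength };

struct StructTable {
  int instance_type;    // stand-in for V8's InstanceType
  int instance_size;    // object size in bytes
  RootListIndex index;  // slot in roots_ that receives the new map
};

static const StructTable struct_table[] = {
    {1, 16, kFooMapRootIndex},
    {2, 24, kBarMapRootIndex},
};

static const void* roots_[kRootListLength] = {nullptr, nullptr};

// Stand-in for a real allocator such as Heap::AllocateMap().
static const void* AllocateMap(int type, int size) {
  std::printf("allocating map: type=%d size=%d\n", type, size);
  static int dummy[8];
  return &dummy[type % 8];
}

int main() {
  for (const StructTable& entry : struct_table) {
    roots_[entry.index] = AllocateMap(entry.instance_type, entry.instance_size);
  }
  return (roots_[kBarMapRootIndex] != nullptr) ? 0 : 1;
}
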
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index f3cb854..02bde2a 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -114,8 +114,10 @@
 
   CodeDesc desc;
   assm.GetCode(&desc);
-  Object* code =
-      Heap::CreateCode(desc, NULL, Code::ComputeFlags(Code::STUB), NULL);
+  Object* code = Heap::CreateCode(desc,
+                                  NULL,
+                                  Code::ComputeFlags(Code::STUB),
+                                  Handle<Code>::null());
   if (!code->IsCode()) return;
   LOG(CodeCreateEvent(Logger::BUILTIN_TAG,
                       Code::cast(code), "CpuFeatures::Probe"));
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index ce4981d..2ee826e 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1149,6 +1149,9 @@
   if (!is_dont_delete) {
     __ cmp(eax, Factory::the_hole_value());
     __ j(equal, &miss, not_taken);
+  } else if (FLAG_debug_code) {
+    __ cmp(eax, Factory::the_hole_value());
+    __ Check(not_equal, "DontDelete cells can't contain the hole");
   }
 
   __ ret(0);
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index 879f671..852d431 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -263,7 +263,6 @@
 
 // Irregexp implementation.
 
-
 // Ensures that the regexp object contains a compiled version of the
 // source for either ASCII or non-ASCII strings.
 // If the compiled version doesn't already exist, it is compiled
@@ -271,25 +270,26 @@
 // If compilation fails, an exception is thrown and this function
 // returns false.
 bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
-  int index;
-  if (is_ascii) {
-    index = JSRegExp::kIrregexpASCIICodeIndex;
-  } else {
-    index = JSRegExp::kIrregexpUC16CodeIndex;
-  }
-  Object* entry = re->DataAt(index);
-  if (!entry->IsTheHole()) {
-    // A value has already been compiled.
-    if (entry->IsJSObject()) {
-      // If it's a JS value, it's an error.
-      Top::Throw(entry);
-      return false;
-    }
-    return true;
-  }
+#ifdef V8_NATIVE_REGEXP
+  if (re->DataAt(JSRegExp::code_index(is_ascii))->IsCode()) return true;
+#else  // ! V8_NATIVE_REGEXP (RegExp interpreter code)
+  if (re->DataAt(JSRegExp::code_index(is_ascii))->IsByteArray()) return true;
+#endif
+  return CompileIrregexp(re, is_ascii);
+}
 
+
+bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
   // Compile the RegExp.
   CompilationZoneScope zone_scope(DELETE_ON_EXIT);
+  Object* entry = re->DataAt(JSRegExp::code_index(is_ascii));
+  if (entry->IsJSObject()) {
+    // If it's a JSObject, a previous compilation failed and threw this object.
+    // Re-throw the object without trying again.
+    Top::Throw(entry);
+    return false;
+  }
+  ASSERT(entry->IsTheHole());
 
   JSRegExp::Flags flags = re->GetFlags();
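
The split above leaves EnsureCompiledIrregexp as a cheap per-encoding cache check and moves the real work, plus the memoization of a failed compile as a stored error object, into CompileIrregexp. A loose standalone sketch of that compile-once-per-encoding shape (C++17, invented types rather than V8's API):

// Sketch: one cached entry per encoding (ASCII / two-byte).  An empty slot
// means "not compiled yet", a Compiled value is reused, and a stored error
// is reported again without retrying the compilation.
#include <iostream>
#include <optional>
#include <stdexcept>
#include <string>
#include <variant>

struct Compiled { std::string code; };
using Entry = std::variant<Compiled, std::string>;  // code or error message

struct RegExpData {
  std::string pattern;
  std::optional<Entry> entries[2];  // index 0: ASCII, index 1: two-byte

  static int code_index(bool is_ascii) { return is_ascii ? 0 : 1; }
};

static Compiled Compile(const std::string& pattern, bool is_ascii) {
  if (pattern.empty()) throw std::runtime_error("malformed_regexp");
  return Compiled{pattern + (is_ascii ? "/ascii" : "/uc16")};
}

// Fast path: return immediately if a compiled version already exists.
static bool EnsureCompiled(RegExpData* re, bool is_ascii) {
  std::optional<Entry>& slot = re->entries[RegExpData::code_index(is_ascii)];
  if (slot && std::holds_alternative<Compiled>(*slot)) return true;
  if (slot) {  // A previous attempt failed: report the memoized error.
    std::cerr << "regexp error: " << std::get<std::string>(*slot) << "\n";
    return false;
  }
  try {
    slot = Compile(re->pattern, is_ascii);
    return true;
  } catch (const std::exception& e) {
    slot = std::string(e.what());  // memoize the failure
    return false;
  }
}

int main() {
  RegExpData re{"a+b*", {}};
  bool first = EnsureCompiled(&re, true);
  bool second = EnsureCompiled(&re, true);  // hits the cache
  return (first && second) ? 0 : 1;
}
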
 
@@ -302,7 +302,7 @@
   FlatStringReader reader(pattern);
   if (!ParseRegExp(&reader, flags.is_multiline(), &compile_data)) {
     // Throw an exception if we fail to parse the pattern.
-    // THIS SHOULD NOT HAPPEN. We already parsed it successfully once.
+    // THIS SHOULD NOT HAPPEN. We already pre-parsed it successfully once.
     ThrowRegExpException(re,
                          pattern,
                          compile_data.error,
@@ -325,17 +325,15 @@
     Handle<Object> regexp_err =
         Factory::NewSyntaxError("malformed_regexp", array);
     Top::Throw(*regexp_err);
-    re->SetDataAt(index, *regexp_err);
+    re->SetDataAt(JSRegExp::code_index(is_ascii), *regexp_err);
     return false;
   }
 
-  NoHandleAllocation no_handles;
-
-  FixedArray* data = FixedArray::cast(re->data());
-  data->set(index, result.code);
-  int register_max = IrregexpMaxRegisterCount(data);
+  Handle<FixedArray> data = Handle<FixedArray>(FixedArray::cast(re->data()));
+  data->set(JSRegExp::code_index(is_ascii), result.code);
+  int register_max = IrregexpMaxRegisterCount(*data);
   if (result.num_registers > register_max) {
-    SetIrregexpMaxRegisterCount(data, result.num_registers);
+    SetIrregexpMaxRegisterCount(*data, result.num_registers);
   }
 
   return true;
@@ -364,24 +362,12 @@
 
 
 ByteArray* RegExpImpl::IrregexpByteCode(FixedArray* re, bool is_ascii) {
-  int index;
-  if (is_ascii) {
-    index = JSRegExp::kIrregexpASCIICodeIndex;
-  } else {
-    index = JSRegExp::kIrregexpUC16CodeIndex;
-  }
-  return ByteArray::cast(re->get(index));
+  return ByteArray::cast(re->get(JSRegExp::code_index(is_ascii)));
 }
 
 
 Code* RegExpImpl::IrregexpNativeCode(FixedArray* re, bool is_ascii) {
-  int index;
-  if (is_ascii) {
-    index = JSRegExp::kIrregexpASCIICodeIndex;
-  } else {
-    index = JSRegExp::kIrregexpUC16CodeIndex;
-  }
-  return Code::cast(re->get(index));
+  return Code::cast(re->get(JSRegExp::code_index(is_ascii)));
 }
 
 
@@ -408,6 +394,7 @@
   int number_of_capture_registers =
       (IrregexpNumberOfCaptures(FixedArray::cast(jsregexp->data())) + 1) * 2;
 
+#ifndef V8_NATIVE_REGEXP
 #ifdef DEBUG
   if (FLAG_trace_regexp_bytecodes) {
     String* pattern = jsregexp->Pattern();
@@ -415,6 +402,7 @@
     PrintF("\n\nSubject string: '%s'\n\n", *(subject->ToCString()));
   }
 #endif
+#endif
 
   if (!subject->IsFlat()) {
     FlattenString(subject);
@@ -422,88 +410,83 @@
 
   last_match_info->EnsureSize(number_of_capture_registers + kLastMatchOverhead);
 
-  bool rc;
-  // We have to initialize this with something to make gcc happy but we can't
-  // initialize it with its real value until after the GC-causing things are
-  // over.
-  FixedArray* array = NULL;
+  Handle<FixedArray> array;
 
   // Dispatch to the correct RegExp implementation.
-  Handle<String> original_subject = subject;
   Handle<FixedArray> regexp(FixedArray::cast(jsregexp->data()));
-  if (UseNativeRegexp()) {
+#ifdef V8_NATIVE_REGEXP
 #if V8_TARGET_ARCH_IA32
-    OffsetsVector captures(number_of_capture_registers);
-    int* captures_vector = captures.vector();
-    RegExpMacroAssemblerIA32::Result res;
-    do {
-      bool is_ascii = subject->IsAsciiRepresentation();
-      if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) {
-        return Handle<Object>::null();
-      }
-      Handle<Code> code(RegExpImpl::IrregexpNativeCode(*regexp, is_ascii));
-      res = RegExpMacroAssemblerIA32::Match(code,
-                                            subject,
-                                            captures_vector,
-                                            captures.length(),
-                                            previous_index);
-      // If result is RETRY, the string have changed representation, and we
-      // must restart from scratch.
-    } while (res == RegExpMacroAssemblerIA32::RETRY);
-    if (res == RegExpMacroAssemblerIA32::EXCEPTION) {
-      ASSERT(Top::has_pending_exception());
-      return Handle<Object>::null();
-    }
-    ASSERT(res == RegExpMacroAssemblerIA32::SUCCESS
-        || res == RegExpMacroAssemblerIA32::FAILURE);
-
-    rc = (res == RegExpMacroAssemblerIA32::SUCCESS);
-    if (!rc) return Factory::null_value();
-
-    array = last_match_info->elements();
-    ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead);
-    // The captures come in (start, end+1) pairs.
-    for (int i = 0; i < number_of_capture_registers; i += 2) {
-      SetCapture(array, i, captures_vector[i]);
-      SetCapture(array, i + 1, captures_vector[i + 1]);
-    }
-#else  // !V8_TARGET_ARCH_IA32
-    UNREACHABLE();
-#endif
-  } else {
+  OffsetsVector captures(number_of_capture_registers);
+  int* captures_vector = captures.vector();
+  RegExpMacroAssemblerIA32::Result res;
+  do {
     bool is_ascii = subject->IsAsciiRepresentation();
     if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) {
       return Handle<Object>::null();
     }
-    // Now that we have done EnsureCompiledIrregexp we can get the number of
-    // registers.
-    int number_of_registers =
-        IrregexpNumberOfRegisters(FixedArray::cast(jsregexp->data()));
-    OffsetsVector registers(number_of_registers);
-    int* register_vector = registers.vector();
-    for (int i = number_of_capture_registers - 1; i >= 0; i--) {
-      register_vector[i] = -1;
-    }
-    Handle<ByteArray> byte_codes(IrregexpByteCode(*regexp, is_ascii));
+    Handle<Code> code(RegExpImpl::IrregexpNativeCode(*regexp, is_ascii));
+    res = RegExpMacroAssemblerIA32::Match(code,
+                                          subject,
+                                          captures_vector,
+                                          captures.length(),
+                                          previous_index);
+    // If the result is RETRY, the string has changed representation, and we
+    // must restart from scratch.
+  } while (res == RegExpMacroAssemblerIA32::RETRY);
+  if (res == RegExpMacroAssemblerIA32::EXCEPTION) {
+    ASSERT(Top::has_pending_exception());
+    return Handle<Object>::null();
+  }
+  ASSERT(res == RegExpMacroAssemblerIA32::SUCCESS
+      || res == RegExpMacroAssemblerIA32::FAILURE);
 
-    rc = IrregexpInterpreter::Match(byte_codes,
-                                    subject,
-                                    register_vector,
-                                    previous_index);
-    if (!rc) return Factory::null_value();
+  if (res != RegExpMacroAssemblerIA32::SUCCESS) return Factory::null_value();
 
-    array = last_match_info->elements();
-    ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead);
-    // The captures come in (start, end+1) pairs.
-    for (int i = 0; i < number_of_capture_registers; i += 2) {
-      SetCapture(array, i, register_vector[i]);
-      SetCapture(array, i + 1, register_vector[i + 1]);
-    }
+  array = Handle<FixedArray>(last_match_info->elements());
+  ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead);
+  // The captures come in (start, end+1) pairs.
+  for (int i = 0; i < number_of_capture_registers; i += 2) {
+    SetCapture(*array, i, captures_vector[i]);
+    SetCapture(*array, i + 1, captures_vector[i + 1]);
+  }
+#else  // !V8_TARGET_ARCH_IA32
+    UNREACHABLE();
+#endif  // V8_TARGET_ARCH_IA32
+#else  // !V8_NATIVE_REGEXP
+  bool is_ascii = subject->IsAsciiRepresentation();
+  if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) {
+    return Handle<Object>::null();
+  }
+  // Now that we have done EnsureCompiledIrregexp we can get the number of
+  // registers.
+  int number_of_registers =
+      IrregexpNumberOfRegisters(FixedArray::cast(jsregexp->data()));
+  OffsetsVector registers(number_of_registers);
+  int* register_vector = registers.vector();
+  for (int i = number_of_capture_registers - 1; i >= 0; i--) {
+    register_vector[i] = -1;
+  }
+  Handle<ByteArray> byte_codes(IrregexpByteCode(*regexp, is_ascii));
+
+  if (!IrregexpInterpreter::Match(byte_codes,
+                                  subject,
+                                  register_vector,
+                                  previous_index)) {
+    return Factory::null_value();
   }
 
-  SetLastCaptureCount(array, number_of_capture_registers);
-  SetLastSubject(array, *original_subject);
-  SetLastInput(array, *original_subject);
+  array = Handle<FixedArray>(last_match_info->elements());
+  ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead);
+  // The captures come in (start, end+1) pairs.
+  for (int i = 0; i < number_of_capture_registers; i += 2) {
+    SetCapture(*array, i, register_vector[i]);
+    SetCapture(*array, i + 1, register_vector[i + 1]);
+  }
+#endif  // V8_NATIVE_REGEXP
+
+  SetLastCaptureCount(*array, number_of_capture_registers);
+  SetLastSubject(*array, *subject);
+  SetLastInput(*array, *subject);
 
   return last_match_info;
 }
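
The native-code branch above runs the match in a do/while loop, re-sampling the subject's encoding each time and retrying whenever the backend reports RETRY because the string's representation changed underneath it. A minimal sketch of that retry shape (the Match stub and the result enum are stand-ins, not V8's types):

// Sketch of the retry loop: keep re-running the match while the backend asks
// for a RETRY (e.g. because the subject's representation changed), and bail
// out on EXCEPTION.
#include <cstdio>

enum class Result { SUCCESS, FAILURE, RETRY, EXCEPTION };

struct Subject {
  bool is_ascii;
  int flips_left;  // how many times the representation will still change
};

static Result Match(Subject* subject, bool compiled_for_ascii) {
  if (subject->flips_left > 0) {
    subject->flips_left--;
    subject->is_ascii = !subject->is_ascii;  // representation changed
    return Result::RETRY;
  }
  return compiled_for_ascii == subject->is_ascii ? Result::SUCCESS
                                                 : Result::FAILURE;
}

int main() {
  Subject subject{true, 2};
  Result res;
  do {
    bool is_ascii = subject.is_ascii;      // re-sample the encoding
    // (A real engine would EnsureCompiled(is_ascii) here before matching.)
    res = Match(&subject, is_ascii);
  } while (res == Result::RETRY);

  if (res == Result::EXCEPTION) return 2;  // propagate a pending exception
  std::printf("match %s\n", res == Result::SUCCESS ? "succeeded" : "failed");
  return res == Result::SUCCESS ? 0 : 1;
}
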
@@ -4474,35 +4457,38 @@
 
   NodeInfo info = *node->info();
 
-  if (RegExpImpl::UseNativeRegexp()) {
+#ifdef V8_NATIVE_REGEXP
 #ifdef V8_TARGET_ARCH_ARM
-    UNREACHABLE();
+  // ARM native regexp not implemented yet.
+  UNREACHABLE();
 #endif
 #ifdef V8_TARGET_ARCH_X64
-    UNREACHABLE();
+  // X64 native regexp not implemented yet.
+  UNREACHABLE();
 #endif
 #ifdef V8_TARGET_ARCH_IA32
-    RegExpMacroAssemblerIA32::Mode mode;
-    if (is_ascii) {
-      mode = RegExpMacroAssemblerIA32::ASCII;
-    } else {
-      mode = RegExpMacroAssemblerIA32::UC16;
-    }
-    RegExpMacroAssemblerIA32 macro_assembler(mode,
-                                             (data->capture_count + 1) * 2);
-    return compiler.Assemble(&macro_assembler,
-                             node,
-                             data->capture_count,
-                             pattern);
-#endif
+  RegExpMacroAssemblerIA32::Mode mode;
+  if (is_ascii) {
+    mode = RegExpMacroAssemblerIA32::ASCII;
+  } else {
+    mode = RegExpMacroAssemblerIA32::UC16;
   }
+  RegExpMacroAssemblerIA32 macro_assembler(mode,
+                                           (data->capture_count + 1) * 2);
+  return compiler.Assemble(&macro_assembler,
+                           node,
+                           data->capture_count,
+                           pattern);
+#endif
+#else  // ! V8_NATIVE_REGEXP
+  // Interpreted regexp.
   EmbeddedVector<byte, 1024> codes;
   RegExpMacroAssemblerIrregexp macro_assembler(codes);
   return compiler.Assemble(&macro_assembler,
                            node,
                            data->capture_count,
                            pattern);
+#endif  // V8_NATIVE_REGEXP
 }
 
-
 }}  // namespace v8::internal
diff --git a/src/jsregexp.h b/src/jsregexp.h
index a86f7e6..0e7965c 100644
--- a/src/jsregexp.h
+++ b/src/jsregexp.h
@@ -37,13 +37,15 @@
 
 class RegExpImpl {
  public:
-  static inline bool UseNativeRegexp() {
-#ifdef V8_TARGET_ARCH_IA32
-    return FLAG_regexp_native;
+  // Whether V8 is compiled with native regexp support or not.
+  static bool UsesNativeRegExp() {
+#ifdef V8_NATIVE_REGEXP
+    return true;
 #else
-  return false;
+    return false;
 #endif
   }
+
   // Creates a regular expression literal in the old space.
   // This function calls the garbage collector if necessary.
   static Handle<Object> CreateRegExpLiteral(Handle<JSFunction> constructor,
@@ -148,7 +150,8 @@
   static String* last_ascii_string_;
   static String* two_byte_cached_string_;
 
-  static bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii);
+  static bool CompileIrregexp(Handle<JSRegExp> re, bool is_ascii);
+  static inline bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii);
 
 
   // Set the subject cache.  The previous string buffer is not deleted, so the
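
UsesNativeRegExp() now simply mirrors the V8_NATIVE_REGEXP build switch. A tiny sketch of the idiom, where a predicate wraps the #ifdef so most call sites can use an ordinary if instead of preprocessor blocks (NATIVE_BACKEND is an invented macro name here):

// Sketch: wrap a build-time switch in a predicate; the dead branch at the
// call site is removed by the optimizer.
#include <cstdio>

// #define NATIVE_BACKEND 1   // would normally come from the build system

static bool UsesNativeBackend() {
#ifdef NATIVE_BACKEND
  return true;
#else
  return false;
#endif
}

int main() {
  if (UsesNativeBackend()) {
    std::puts("dispatching to native code");
  } else {
    std::puts("dispatching to the interpreter");
  }
  return 0;
}
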
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 5e46f2a..8ab0264 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -224,7 +224,9 @@
   if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
 
   Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
-  if (reinterpret_cast<String*>(second) != Heap::empty_string()) return object;
+  if (second != Heap::raw_unchecked_empty_string()) {
+    return object;
+  }
 
   // Since we don't have the object's start, it is impossible to update the
   // remembered set.  Therefore, we only replace the string with its left
@@ -421,7 +423,7 @@
           }
         }
         // Set the entry to null_value (as deleted).
-        *p = Heap::null_value();
+        *p = Heap::raw_unchecked_null_value();
         pointers_removed_++;
       }
     }
@@ -475,7 +477,7 @@
     DescriptorArray* descriptors) {
   if (descriptors->IsMarked()) return;
   // Empty descriptor array is marked as a root before any maps are marked.
-  ASSERT(descriptors != Heap::empty_descriptor_array());
+  ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
   SetMark(descriptors);
 
   FixedArray* contents = reinterpret_cast<FixedArray*>(
@@ -590,7 +592,7 @@
   // and if it is a sliced string or a cons string backed by an
   // external string (even indirectly), then the external string does
   // not receive a weak reference callback.
-  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table());
+  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
   // Mark the symbol table itself.
   SetMark(symbol_table);
   // Explicitly mark the prefix.
@@ -780,10 +782,9 @@
   ProcessObjectGroups(root_visitor.stack_visitor());
 
   // Prune the symbol table removing all symbols only pointed to by the
-  // symbol table.  Cannot use SymbolTable::cast here because the symbol
+  // symbol table.  Cannot use symbol_table() here because the symbol
   // table is marked.
-  SymbolTable* symbol_table =
-      reinterpret_cast<SymbolTable*>(Heap::symbol_table());
+  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
   SymbolTableCleaner v;
   symbol_table->IterateElements(&v);
   symbol_table->ElementsRemoved(v.PointersRemoved());
@@ -1142,11 +1143,11 @@
       // since their existing map might not be live after the collection.
       int size = object->Size();
       if (size >= ByteArray::kHeaderSize) {
-        object->set_map(Heap::byte_array_map());
+        object->set_map(Heap::raw_unchecked_byte_array_map());
         ByteArray::cast(object)->set_length(ByteArray::LengthFor(size));
       } else {
         ASSERT(size == kPointerSize);
-        object->set_map(Heap::one_word_filler_map());
+        object->set_map(Heap::raw_unchecked_one_word_filler_map());
       }
       ASSERT(object->Size() == size);
     }
diff --git a/src/math.js b/src/math.js
index d12927e..db75cb2 100644
--- a/src/math.js
+++ b/src/math.js
@@ -68,10 +68,12 @@
 }
 
 // ECMA 262 - 15.8.2.5
-function MathAtan2(x, y) {
-  if (!IS_NUMBER(x)) x = ToNumber(x);
+// The naming of y and x matches the spec, as does the order in which
+// ToNumber (valueOf) is called.
+function MathAtan2(y, x) {
   if (!IS_NUMBER(y)) y = ToNumber(y);
-  return %Math_atan2(x, y);
+  if (!IS_NUMBER(x)) x = ToNumber(x);
+  return %Math_atan2(y, x);
 }
 
 // ECMA 262 - 15.8.2.6
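
The MathAtan2 change above is about parameter naming and evaluation order only: the spec's signature is atan2(y, x), with y converted first. The underlying C library call uses the same (y, x) order, as this small check shows:

// atan2 takes (y, x): atan2(1, 0) is pi/2 (straight up) and atan2(0, 1) is 0
// (along the positive x axis).  Swapping the arguments gives the wrong angle.
#include <cmath>
#include <cstdio>

int main() {
  std::printf("atan2(1, 0) = %f\n", std::atan2(1.0, 0.0));  // ~1.570796
  std::printf("atan2(0, 1) = %f\n", std::atan2(0.0, 1.0));  // 0.000000
  return 0;
}
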
@@ -117,11 +119,12 @@
 // ECMA 262 - 15.8.2.11
 function MathMax(arg1, arg2) {  // length == 2
   var r = -$Infinity;
-  for (var i = %_ArgumentsLength() - 1; i >= 0; --i) {
+  var length = %_ArgumentsLength();
+  for (var i = 0; i < length; i++) {
     var n = ToNumber(%_Arguments(i));
     if (NUMBER_IS_NAN(n)) return n;
-    // Make sure +0 is consider greater than -0.
-    if (n > r || (n === 0 && r === 0 && (1 / n) > (1 / r))) r = n;
+    // Make sure +0 is considered greater than -0.
+    if (n > r || (r === 0 && n === 0 && !%_IsSmi(r))) r = n;
   }
   return r;
 }
@@ -129,11 +132,12 @@
 // ECMA 262 - 15.8.2.12
 function MathMin(arg1, arg2) {  // length == 2
   var r = $Infinity;
-  for (var i = %_ArgumentsLength() - 1; i >= 0; --i) {
+  var length = %_ArgumentsLength();
+  for (var i = 0; i < length; i++) {
     var n = ToNumber(%_Arguments(i));
     if (NUMBER_IS_NAN(n)) return n;
-    // Make sure -0 is consider less than +0.
-    if (n < r || (n === 0 && r === 0 && (1 / n) < (1 / r))) r = n;
+    // Make sure -0 is considered less than +0.
+    if (n < r || (r === 0 && n === 0 && !%_IsSmi(n))) r = n;
   }
   return r;
 }
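
Both loops need +0 and -0 to be ordered even though they compare equal; the old code inspected the sign of 1/n, while the new code appears to rely on -0 never being representable as a Smi. A small sketch of the same ±0 tie-break done portably with std::signbit:

// +0 and -0 compare equal, so a max loop needs an explicit tie-break.
// std::signbit (or the sign of 1/x) distinguishes them.
#include <cmath>
#include <cstdio>

static double MaxPreferringPositiveZero(double r, double n) {
  if (n > r) return n;
  if (n == 0.0 && r == 0.0 && std::signbit(r) && !std::signbit(n)) {
    return n;  // replace -0 with +0
  }
  return r;
}

int main() {
  double result = MaxPreferringPositiveZero(-0.0, 0.0);
  std::printf("max(-0, +0) = %g, signbit = %d\n", result, std::signbit(result));
  return std::signbit(result) ? 1 : 0;  // expect +0, i.e. signbit == 0
}
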
diff --git a/src/messages.js b/src/messages.js
index 6157874..870c969 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -60,10 +60,8 @@
   unexpected_token_string:      "Unexpected string",
   unexpected_token_identifier:  "Unexpected identifier",
   unexpected_eos:               "Unexpected end of input",
-  expected_label:               "Expected label",
   malformed_regexp:             "Invalid regular expression: /%0/: %1",
   unterminated_regexp:          "Invalid regular expression: missing /",
-  pcre_error:                   "PCRE function %0, error code %1",
   regexp_flags:                 "Cannot supply flags when constructing one RegExp from another",
   invalid_lhs_in_assignment:    "Invalid left-hand side in assignment",
   invalid_lhs_in_for_in:        "Invalid left-hand side in for-in",
@@ -74,21 +72,17 @@
   redeclaration:                "%0 '%1' has already been declared",
   no_catch_or_finally:          "Missing catch or finally after try",
   unknown_label:                "Undefined label '%0'",
-  invalid_break:                "Invalid break statement",
-  invalid_continue:             "Invalid continue statement",
   uncaught_exception:           "Uncaught %0",
   stack_trace:                  "Stack Trace:\n%0",
   called_non_callable:          "%0 is not a function",
   undefined_method:             "Object %1 has no method '%0'",
   property_not_function:        "Property '%0' of object %1 is not a function",
-  null_or_undefined:            "Cannot access property of null or undefined",
   cannot_convert_to_primitive:  "Cannot convert object to primitive value",
   not_constructor:              "%0 is not a constructor",
   not_defined:                  "%0 is not defined",
   non_object_property_load:     "Cannot read property '%0' of %1",
   non_object_property_store:    "Cannot set property '%0' of %1",
   non_object_property_call:     "Cannot call method '%0' of %1",
-  illegal_eval:                 "Unsupported indirect eval() call",
   with_expression:              "%0 has no properties",
   illegal_invocation:           "Illegal invocation",
   no_setter_in_callback:        "Cannot set property %0 of %1 which has only a getter",
@@ -101,13 +95,11 @@
   reduce_no_initial:            "Reduce of empty array with no initial value",
   // RangeError
   invalid_array_length:         "Invalid array length",
-  invalid_array_apply_length:   "Function.prototype.apply supports only up to 1024 arguments",
   stack_overflow:               "Maximum call stack size exceeded",
   apply_overflow:               "Function.prototype.apply cannot support %0 arguments",
   // SyntaxError
   unable_to_parse:              "Parse error",
   duplicate_regexp_flag:        "Duplicate RegExp flag %0",
-  unrecognized_regexp_flag:     "Unrecognized RegExp flag %0",
   invalid_regexp:               "Invalid RegExp pattern /%0/",
   illegal_break:                "Illegal break statement",
   illegal_continue:             "Illegal continue statement",
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index f0eacad..4974268 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -714,7 +714,7 @@
       break;
     }
     case JSRegExp::IRREGEXP: {
-      bool is_native = RegExpImpl::UseNativeRegexp();
+      bool is_native = RegExpImpl::UsesNativeRegExp();
 
       FixedArray* arr = FixedArray::cast(data());
       Object* ascii_data = arr->get(JSRegExp::kIrregexpASCIICodeIndex);
diff --git a/src/objects-inl.h b/src/objects-inl.h
index ff0f2e5..3b152d6 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -481,7 +481,7 @@
 
 
 bool Object::IsSymbolTable() {
-  return IsHashTable() && this == Heap::symbol_table();
+  return IsHashTable() && this == Heap::raw_unchecked_symbol_table();
 }
 
 
@@ -2655,8 +2655,8 @@
   // No write barrier is needed since empty_fixed_array is not in new space.
   // Please note this function is used during marking:
   //  - MarkCompactCollector::MarkUnmarkedObject
-  ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
-  WRITE_FIELD(this, kCodeCacheOffset, Heap::empty_fixed_array());
+  ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array()));
+  WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array());
 }
 
 
diff --git a/src/objects.cc b/src/objects.cc
index ee0ac2d..93e7495 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -467,8 +467,15 @@
     // If we have a global object set the cell to the hole.
     if (IsGlobalObject()) {
       PropertyDetails details = dictionary->DetailsAt(entry);
-      if (details.IsDontDelete() && mode != FORCE_DELETION) {
-        return Heap::false_value();
+      if (details.IsDontDelete()) {
+        if (mode != FORCE_DELETION) return Heap::false_value();
+        // When forced to delete global properties, we have to make a
+        // map change to invalidate any ICs that think they can load
+        // from the DontDelete cell without checking if it contains
+        // the hole value.
+        Object* new_map = map()->CopyDropDescriptors();
+        if (new_map->IsFailure()) return new_map;
+        set_map(Map::cast(new_map));
       }
       JSGlobalPropertyCell* cell =
           JSGlobalPropertyCell::cast(dictionary->ValueAt(entry));
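
The comment above captures the trick: ICs are keyed on the receiver's map, so installing a fresh copy of the map invalidates every cached fast path at once. A loose illustration of invalidation-by-identity-change, with a std::map standing in for the inline cache and plain structs for maps and objects:

// Sketch: a cache keyed on the object's current "shape" pointer.  Pointing
// the object at a fresh copy of its shape makes all previously cached
// entries unreachable, without flushing the cache itself.
#include <cstdint>
#include <cstdio>
#include <map>
#include <string>
#include <utility>

struct Shape { int generation; };       // stand-in for a hidden class/map
struct Object { const Shape* shape; };  // each object points at its shape

using CacheKey = std::pair<std::uintptr_t, std::string>;
static std::map<CacheKey, int> g_cache;  // stand-in for an inline cache

static CacheKey Key(const Object& obj, const std::string& name) {
  return {reinterpret_cast<std::uintptr_t>(obj.shape), name};
}

static bool CacheLookup(const Object& obj, const std::string& name, int* out) {
  auto it = g_cache.find(Key(obj, name));
  if (it == g_cache.end()) return false;
  *out = it->second;
  return true;
}

int main() {
  static Shape original{0};
  static Shape copy{1};
  Object obj{&original};
  g_cache[Key(obj, "x")] = 42;  // cached (shape, name) fast path

  int value = 0;
  std::printf("before map change: hit=%d\n", CacheLookup(obj, "x", &value));

  // Give the object a fresh copy of its shape; every entry cached under the
  // old shape now misses, which is the invalidation effect wanted above.
  obj.shape = &copy;
  std::printf("after map change:  hit=%d\n", CacheLookup(obj, "x", &value));
  return 0;
}
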
@@ -1711,6 +1718,10 @@
       if (IsGlobalObject()) {
         PropertyDetails d = property_dictionary()->DetailsAt(entry);
         if (d.IsDeleted()) {
+          // We've skipped a global object during lookup, so we cannot
+          // use inline caching: the map of the global object does not
+          // change when the property is later re-added.
+          result->DisallowCaching();
           result->NotFound();
           return;
         }
@@ -1865,7 +1876,7 @@
       if (value == result->GetConstantFunction()) return value;
       // Preserve the attributes of this existing property.
       attributes = result->GetAttributes();
-      return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
+      return ConvertDescriptorToField(name, value, attributes);
     case CALLBACKS:
       return SetPropertyWithCallback(result->GetCallbackObject(),
                                      name,
@@ -1928,7 +1939,7 @@
   if (!result->IsLoaded()) {
     return SetLazyProperty(result, name, value, attributes);
   }
-  //  Check of IsReadOnly removed from here in clone.
+  // Check of IsReadOnly removed from here in clone.
   switch (result->type()) {
     case NORMAL:
       return SetNormalizedProperty(result, value);
@@ -1947,7 +1958,7 @@
       if (value == result->GetConstantFunction()) return value;
       // Preserve the attributes of this existing property.
       attributes = result->GetAttributes();
-      return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
+      return ConvertDescriptorToField(name, value, attributes);
     case CALLBACKS:
     case INTERCEPTOR:
       // Override callback in clone
@@ -4604,7 +4615,7 @@
   // low-level accessors to get and modify their data.
   DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
       *RawField(this, Map::kInstanceDescriptorsOffset));
-  if (d == Heap::empty_descriptor_array()) return;
+  if (d == Heap::raw_unchecked_empty_descriptor_array()) return;
   Smi* NullDescriptorDetails =
     PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
   FixedArray* contents = reinterpret_cast<FixedArray*>(
@@ -5825,11 +5836,10 @@
 }
 
 
-bool JSObject::GetPropertyWithInterceptorProper(
+Object* JSObject::GetPropertyWithInterceptorProper(
     JSObject* receiver,
     String* name,
-    PropertyAttributes* attributes,
-    Object** result_object) {
+    PropertyAttributes* attributes) {
   HandleScope scope;
   Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
   Handle<JSObject> receiver_handle(receiver);
@@ -5850,17 +5860,14 @@
       VMState state(EXTERNAL);
       result = getter(v8::Utils::ToLocal(name_handle), info);
     }
-    if (Top::has_scheduled_exception()) {
-      return false;
-    }
-    if (!result.IsEmpty()) {
+    if (!Top::has_scheduled_exception() && !result.IsEmpty()) {
       *attributes = NONE;
-      *result_object = *v8::Utils::OpenHandle(*result);
-      return true;
+      return *v8::Utils::OpenHandle(*result);
     }
   }
 
-  return false;
+  *attributes = ABSENT;
+  return Heap::undefined_value();
 }
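
The helper's new contract is to always return a value and to signal "the interceptor produced nothing" through *attributes == ABSENT; a scheduled exception is checked separately by the caller. A minimal sketch of that out-parameter convention, with invented names:

// Sketch: signal "no result" through an attributes out-parameter rather than
// a bool plus pointer-to-result pair, so the value itself can be returned.
#include <cstdio>
#include <string>

enum Attributes { NONE, ABSENT };

// Pretend interceptor: only knows the property "answer".
static std::string GetWithInterceptor(const std::string& name,
                                      Attributes* attributes) {
  if (name == "answer") {
    *attributes = NONE;    // interceptor produced a value
    return "42";
  }
  *attributes = ABSENT;    // nothing produced; caller falls back
  return std::string();    // placeholder, like Heap::undefined_value()
}

int main() {
  Attributes attributes = ABSENT;
  std::string result = GetWithInterceptor("answer", &attributes);
  if (attributes != ABSENT) {
    std::printf("interceptor result: %s\n", result.c_str());
  } else {
    std::printf("falling back to the normal lookup path\n");
  }
  return 0;
}
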
 
 
@@ -5874,12 +5881,13 @@
   Handle<JSObject> holder_handle(this);
   Handle<String> name_handle(name);
 
-  Object* result = NULL;
-  if (GetPropertyWithInterceptorProper(receiver, name, attributes, &result)) {
+  Object* result = GetPropertyWithInterceptorProper(receiver,
+                                                    name,
+                                                    attributes);
+  if (*attributes != ABSENT) {
     return result;
-  } else {
-    RETURN_IF_SCHEDULED_EXCEPTION();
   }
+  RETURN_IF_SCHEDULED_EXCEPTION();
 
   int property_index = lookup_hint->value();
   if (property_index >= 0) {
@@ -5924,12 +5932,11 @@
   Handle<JSObject> holder_handle(this);
   Handle<String> name_handle(name);
 
-  Object* result = NULL;
-  if (GetPropertyWithInterceptorProper(receiver, name, attributes, &result)) {
+  Object* result = GetPropertyWithInterceptorProper(receiver, name, attributes);
+  if (*attributes != ABSENT) {
     return result;
-  } else {
-    RETURN_IF_SCHEDULED_EXCEPTION();
   }
+  RETURN_IF_SCHEDULED_EXCEPTION();
 
   result = holder_handle->GetPropertyPostInterceptor(
       *receiver_handle,
diff --git a/src/objects.h b/src/objects.h
index ebd0bb4..446b4a7 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -297,97 +297,202 @@
   V(JS_FUNCTION_TYPE)                           \
 
 
+
 // Since string types are not consecutive, this macro is used to
 // iterate over them.
 #define STRING_TYPE_LIST(V)                                                    \
-  V(SHORT_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, short_symbol)           \
-  V(MEDIUM_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, medium_symbol)         \
-  V(LONG_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, long_symbol)             \
-  V(SHORT_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, short_ascii_symbol) \
+  V(SHORT_SYMBOL_TYPE,                                                         \
+    SeqTwoByteString::kAlignedSize,                                            \
+    short_symbol,                                                              \
+    ShortSymbol)                                                               \
+  V(MEDIUM_SYMBOL_TYPE,                                                        \
+    SeqTwoByteString::kAlignedSize,                                            \
+    medium_symbol,                                                             \
+    MediumSymbol)                                                              \
+  V(LONG_SYMBOL_TYPE,                                                          \
+    SeqTwoByteString::kAlignedSize,                                            \
+    long_symbol,                                                               \
+    LongSymbol)                                                                \
+  V(SHORT_ASCII_SYMBOL_TYPE,                                                   \
+    SeqAsciiString::kAlignedSize,                                              \
+    short_ascii_symbol,                                                        \
+    ShortAsciiSymbol)                                                          \
   V(MEDIUM_ASCII_SYMBOL_TYPE,                                                  \
     SeqAsciiString::kAlignedSize,                                              \
-    medium_ascii_symbol)                                                       \
-  V(LONG_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, long_ascii_symbol)   \
-  V(SHORT_CONS_SYMBOL_TYPE, ConsString::kSize, short_cons_symbol)              \
-  V(MEDIUM_CONS_SYMBOL_TYPE, ConsString::kSize, medium_cons_symbol)            \
-  V(LONG_CONS_SYMBOL_TYPE, ConsString::kSize, long_cons_symbol)                \
-  V(SHORT_CONS_ASCII_SYMBOL_TYPE, ConsString::kSize, short_cons_ascii_symbol)  \
-  V(MEDIUM_CONS_ASCII_SYMBOL_TYPE, ConsString::kSize, medium_cons_ascii_symbol)\
-  V(LONG_CONS_ASCII_SYMBOL_TYPE, ConsString::kSize, long_cons_ascii_symbol)    \
-  V(SHORT_SLICED_SYMBOL_TYPE, SlicedString::kSize, short_sliced_symbol)        \
-  V(MEDIUM_SLICED_SYMBOL_TYPE, SlicedString::kSize, medium_sliced_symbol)      \
-  V(LONG_SLICED_SYMBOL_TYPE, SlicedString::kSize, long_sliced_symbol)          \
+    medium_ascii_symbol,                                                       \
+    MediumAsciiSymbol)                                                         \
+  V(LONG_ASCII_SYMBOL_TYPE,                                                    \
+    SeqAsciiString::kAlignedSize,                                              \
+    long_ascii_symbol,                                                         \
+    LongAsciiSymbol)                                                           \
+  V(SHORT_CONS_SYMBOL_TYPE,                                                    \
+    ConsString::kSize,                                                         \
+    short_cons_symbol,                                                         \
+    ShortConsSymbol)                                                           \
+  V(MEDIUM_CONS_SYMBOL_TYPE,                                                   \
+    ConsString::kSize,                                                         \
+    medium_cons_symbol,                                                        \
+    MediumConsSymbol)                                                          \
+  V(LONG_CONS_SYMBOL_TYPE,                                                     \
+    ConsString::kSize,                                                         \
+    long_cons_symbol,                                                          \
+    LongConsSymbol)                                                            \
+  V(SHORT_CONS_ASCII_SYMBOL_TYPE,                                              \
+    ConsString::kSize,                                                         \
+    short_cons_ascii_symbol,                                                   \
+    ShortConsAsciiSymbol)                                                      \
+  V(MEDIUM_CONS_ASCII_SYMBOL_TYPE,                                             \
+    ConsString::kSize,                                                         \
+    medium_cons_ascii_symbol,                                                  \
+    MediumConsAsciiSymbol)                                                     \
+  V(LONG_CONS_ASCII_SYMBOL_TYPE,                                               \
+    ConsString::kSize,                                                         \
+    long_cons_ascii_symbol,                                                    \
+    LongConsAsciiSymbol)                                                       \
+  V(SHORT_SLICED_SYMBOL_TYPE,                                                  \
+    SlicedString::kSize,                                                       \
+    short_sliced_symbol,                                                       \
+    ShortSlicedSymbol)                                                         \
+  V(MEDIUM_SLICED_SYMBOL_TYPE,                                                 \
+    SlicedString::kSize,                                                       \
+    medium_sliced_symbol,                                                      \
+    MediumSlicedSymbol)                                                        \
+  V(LONG_SLICED_SYMBOL_TYPE,                                                   \
+    SlicedString::kSize,                                                       \
+    long_sliced_symbol,                                                        \
+    LongSlicedSymbol)                                                          \
   V(SHORT_SLICED_ASCII_SYMBOL_TYPE,                                            \
     SlicedString::kSize,                                                       \
-    short_sliced_ascii_symbol)                                                 \
+    short_sliced_ascii_symbol,                                                 \
+    ShortSlicedAsciiSymbol)                                                    \
   V(MEDIUM_SLICED_ASCII_SYMBOL_TYPE,                                           \
     SlicedString::kSize,                                                       \
-    medium_sliced_ascii_symbol)                                                \
+    medium_sliced_ascii_symbol,                                                \
+    MediumSlicedAsciiSymbol)                                                   \
   V(LONG_SLICED_ASCII_SYMBOL_TYPE,                                             \
     SlicedString::kSize,                                                       \
-    long_sliced_ascii_symbol)                                                  \
+    long_sliced_ascii_symbol,                                                  \
+    LongSlicedAsciiSymbol)                                                     \
   V(SHORT_EXTERNAL_SYMBOL_TYPE,                                                \
     ExternalTwoByteString::kSize,                                              \
-    short_external_symbol)                                                     \
+    short_external_symbol,                                                     \
+    ShortExternalSymbol)                                                       \
   V(MEDIUM_EXTERNAL_SYMBOL_TYPE,                                               \
     ExternalTwoByteString::kSize,                                              \
-    medium_external_symbol)                                                    \
+    medium_external_symbol,                                                    \
+    MediumExternalSymbol)                                                      \
   V(LONG_EXTERNAL_SYMBOL_TYPE,                                                 \
     ExternalTwoByteString::kSize,                                              \
-    long_external_symbol)                                                      \
+    long_external_symbol,                                                      \
+    LongExternalSymbol)                                                        \
   V(SHORT_EXTERNAL_ASCII_SYMBOL_TYPE,                                          \
     ExternalAsciiString::kSize,                                                \
-    short_external_ascii_symbol)                                               \
+    short_external_ascii_symbol,                                               \
+    ShortExternalAsciiSymbol)                                                  \
   V(MEDIUM_EXTERNAL_ASCII_SYMBOL_TYPE,                                         \
     ExternalAsciiString::kSize,                                                \
-    medium_external_ascii_symbol)                                              \
+    medium_external_ascii_symbol,                                              \
+    MediumExternalAsciiSymbol)                                                 \
   V(LONG_EXTERNAL_ASCII_SYMBOL_TYPE,                                           \
     ExternalAsciiString::kSize,                                                \
-    long_external_ascii_symbol)                                                \
-  V(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize, short_string)           \
-  V(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize, medium_string)         \
-  V(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize, long_string)             \
-  V(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, short_ascii_string) \
+    long_external_ascii_symbol,                                                \
+    LongExternalAsciiSymbol)                                                   \
+  V(SHORT_STRING_TYPE,                                                         \
+    SeqTwoByteString::kAlignedSize,                                            \
+    short_string,                                                              \
+    ShortString)                                                               \
+  V(MEDIUM_STRING_TYPE,                                                        \
+    SeqTwoByteString::kAlignedSize,                                            \
+    medium_string,                                                             \
+    MediumString)                                                              \
+  V(LONG_STRING_TYPE,                                                          \
+    SeqTwoByteString::kAlignedSize,                                            \
+    long_string,                                                               \
+    LongString)                                                                \
+  V(SHORT_ASCII_STRING_TYPE,                                                   \
+    SeqAsciiString::kAlignedSize,                                              \
+    short_ascii_string,                                                        \
+    ShortAsciiString)                                                          \
   V(MEDIUM_ASCII_STRING_TYPE,                                                  \
     SeqAsciiString::kAlignedSize,                                              \
-    medium_ascii_string)                                                       \
-  V(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, long_ascii_string)   \
-  V(SHORT_CONS_STRING_TYPE, ConsString::kSize, short_cons_string)              \
-  V(MEDIUM_CONS_STRING_TYPE, ConsString::kSize, medium_cons_string)            \
-  V(LONG_CONS_STRING_TYPE, ConsString::kSize, long_cons_string)                \
-  V(SHORT_CONS_ASCII_STRING_TYPE, ConsString::kSize, short_cons_ascii_string)  \
-  V(MEDIUM_CONS_ASCII_STRING_TYPE, ConsString::kSize, medium_cons_ascii_string)\
-  V(LONG_CONS_ASCII_STRING_TYPE, ConsString::kSize, long_cons_ascii_string)    \
-  V(SHORT_SLICED_STRING_TYPE, SlicedString::kSize, short_sliced_string)        \
-  V(MEDIUM_SLICED_STRING_TYPE, SlicedString::kSize, medium_sliced_string)      \
-  V(LONG_SLICED_STRING_TYPE, SlicedString::kSize, long_sliced_string)          \
+    medium_ascii_string,                                                       \
+    MediumAsciiString)                                                         \
+  V(LONG_ASCII_STRING_TYPE,                                                    \
+    SeqAsciiString::kAlignedSize,                                              \
+    long_ascii_string,                                                         \
+    LongAsciiString)                                                           \
+  V(SHORT_CONS_STRING_TYPE,                                                    \
+    ConsString::kSize,                                                         \
+    short_cons_string,                                                         \
+    ShortConsString)                                                           \
+  V(MEDIUM_CONS_STRING_TYPE,                                                   \
+    ConsString::kSize,                                                         \
+    medium_cons_string,                                                        \
+    MediumConsString)                                                          \
+  V(LONG_CONS_STRING_TYPE,                                                     \
+    ConsString::kSize,                                                         \
+    long_cons_string,                                                          \
+    LongConsString)                                                            \
+  V(SHORT_CONS_ASCII_STRING_TYPE,                                              \
+    ConsString::kSize,                                                         \
+    short_cons_ascii_string,                                                   \
+    ShortConsAsciiString)                                                      \
+  V(MEDIUM_CONS_ASCII_STRING_TYPE,                                             \
+    ConsString::kSize,                                                         \
+    medium_cons_ascii_string,                                                  \
+    MediumConsAsciiString)                                                     \
+  V(LONG_CONS_ASCII_STRING_TYPE,                                               \
+    ConsString::kSize,                                                         \
+    long_cons_ascii_string,                                                    \
+    LongConsAsciiString)                                                       \
+  V(SHORT_SLICED_STRING_TYPE,                                                  \
+    SlicedString::kSize,                                                       \
+    short_sliced_string,                                                       \
+    ShortSlicedString)                                                         \
+  V(MEDIUM_SLICED_STRING_TYPE,                                                 \
+    SlicedString::kSize,                                                       \
+    medium_sliced_string,                                                      \
+    MediumSlicedString)                                                        \
+  V(LONG_SLICED_STRING_TYPE,                                                   \
+    SlicedString::kSize,                                                       \
+    long_sliced_string,                                                        \
+    LongSlicedString)                                                          \
   V(SHORT_SLICED_ASCII_STRING_TYPE,                                            \
     SlicedString::kSize,                                                       \
-    short_sliced_ascii_string)                                                 \
+    short_sliced_ascii_string,                                                 \
+    ShortSlicedAsciiString)                                                    \
   V(MEDIUM_SLICED_ASCII_STRING_TYPE,                                           \
     SlicedString::kSize,                                                       \
-    medium_sliced_ascii_string)                                                \
+    medium_sliced_ascii_string,                                                \
+    MediumSlicedAsciiString)                                                   \
   V(LONG_SLICED_ASCII_STRING_TYPE,                                             \
     SlicedString::kSize,                                                       \
-    long_sliced_ascii_string)                                                  \
+    long_sliced_ascii_string,                                                  \
+    LongSlicedAsciiString)                                                     \
   V(SHORT_EXTERNAL_STRING_TYPE,                                                \
     ExternalTwoByteString::kSize,                                              \
-    short_external_string)                                                     \
+    short_external_string,                                                     \
+    ShortExternalString)                                                       \
   V(MEDIUM_EXTERNAL_STRING_TYPE,                                               \
     ExternalTwoByteString::kSize,                                              \
-    medium_external_string)                                                    \
+    medium_external_string,                                                    \
+    MediumExternalString)                                                      \
   V(LONG_EXTERNAL_STRING_TYPE,                                                 \
     ExternalTwoByteString::kSize,                                              \
-    long_external_string)                                                      \
+    long_external_string,                                                      \
+    LongExternalString)                                                        \
   V(SHORT_EXTERNAL_ASCII_STRING_TYPE,                                          \
     ExternalAsciiString::kSize,                                                \
-    short_external_ascii_string)                                               \
+    short_external_ascii_string,                                               \
+    ShortExternalAsciiString)                                                  \
   V(MEDIUM_EXTERNAL_ASCII_STRING_TYPE,                                         \
     ExternalAsciiString::kSize,                                                \
-    medium_external_ascii_string)                                              \
+    medium_external_ascii_string,                                              \
+    MediumExternalAsciiString)                                                 \
   V(LONG_EXTERNAL_ASCII_STRING_TYPE,                                           \
     ExternalAsciiString::kSize,                                                \
-    long_external_ascii_string)
+    long_external_ascii_string,                                                \
+    LongExternalAsciiString)
 
 // A struct is a simple object with a set of object-valued fields.  Including an
 // object type in this causes the compiler to generate most of the boilerplate
@@ -1593,13 +1698,11 @@
 
   void LookupInDescriptor(String* name, LookupResult* result);
 
-  // Attempts to get property with a named interceptor getter.  Returns
-  // |true| and stores result into |result| if succesful, otherwise
-  // returns |false|
-  bool GetPropertyWithInterceptorProper(JSObject* receiver,
-                                        String* name,
-                                        PropertyAttributes* attributes,
-                                        Object** result);
+  // Attempts to get a property with a named interceptor getter.
+  // Sets |attributes| to ABSENT if the interceptor didn't return anything.
+  Object* GetPropertyWithInterceptorProper(JSObject* receiver,
+                                           String* name,
+                                           PropertyAttributes* attributes);
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject);
 };
@@ -3272,6 +3375,13 @@
   inline Object* DataAt(int index);
   // Set implementation data after the object has been prepared.
   inline void SetDataAt(int index, Object* value);
+  static int code_index(bool is_ascii) {
+    if (is_ascii) {
+      return kIrregexpASCIICodeIndex;
+    } else {
+      return kIrregexpUC16CodeIndex;
+    }
+  }
 
   static inline JSRegExp* cast(Object* obj);
 
diff --git a/src/parser.cc b/src/parser.cc
index e1d9b71..89d6d5b 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -1576,10 +1576,10 @@
   // to the calling function context.
   if (top_scope_->is_function_scope()) {
     // Declare the variable in the function scope.
-    var = top_scope_->LookupLocal(name);
+    var = top_scope_->LocalLookup(name);
     if (var == NULL) {
       // Declare the name.
-      var = top_scope_->Declare(name, mode);
+      var = top_scope_->DeclareLocal(name, mode);
     } else {
       // The name was declared before; check for conflicting
       // re-declarations. If the previous declaration was a const or the
@@ -2045,7 +2045,7 @@
   //   'continue' Identifier? ';'
 
   Expect(Token::CONTINUE, CHECK_OK);
-  Handle<String> label(static_cast<String**>(NULL));
+  Handle<String> label = Handle<String>::null();
   Token::Value tok = peek();
   if (!scanner_.has_line_terminator_before_next() &&
       tok != Token::SEMICOLON && tok != Token::RBRACE && tok != Token::EOS) {
@@ -3466,8 +3466,8 @@
     while (!done) {
       Handle<String> param_name = ParseIdentifier(CHECK_OK);
       if (!is_pre_parsing_) {
-        top_scope_->AddParameter(top_scope_->Declare(param_name,
-                                                     Variable::VAR));
+        top_scope_->AddParameter(top_scope_->DeclareLocal(param_name,
+                                                          Variable::VAR));
         num_parameters++;
       }
       done = (peek() == Token::RPAREN);
diff --git a/src/runtime.cc b/src/runtime.cc
index aeda068..ea2690e 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -1016,16 +1016,16 @@
   ASSERT(args.length() == 4);
   CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
   CONVERT_ARG_CHECKED(String, subject, 1);
-  // Due to the way the JS files are constructed this must be less than the
+  // Due to the way the JS calls are constructed this must be less than the
   // length of a string, i.e. it is always a Smi.  We check anyway for security.
-  CONVERT_CHECKED(Smi, index, args[2]);
+  CONVERT_SMI_CHECKED(index, args[2]);
   CONVERT_ARG_CHECKED(JSArray, last_match_info, 3);
   RUNTIME_ASSERT(last_match_info->HasFastElements());
-  RUNTIME_ASSERT(index->value() >= 0);
-  RUNTIME_ASSERT(index->value() <= subject->length());
+  RUNTIME_ASSERT(index >= 0);
+  RUNTIME_ASSERT(index <= subject->length());
   Handle<Object> result = RegExpImpl::Exec(regexp,
                                            subject,
-                                           index->value(),
+                                           index,
                                            last_match_info);
   if (result.is_null()) return Failure::Exception();
   return *result;
@@ -2598,15 +2598,13 @@
         Object* value = receiver->FastPropertyAt(offset);
         return value->IsTheHole() ? Heap::undefined_value() : value;
       }
-      // Lookup cache miss.  Perform lookup and update the cache if
-      // appropriate.
+      // Lookup cache miss.  Perform lookup and update the cache if appropriate.
       LookupResult result;
       receiver->LocalLookup(key, &result);
       if (result.IsProperty() && result.IsLoaded() && result.type() == FIELD) {
         int offset = result.GetFieldIndex();
         KeyedLookupCache::Update(receiver_map, key, offset);
-        Object* value = receiver->FastPropertyAt(offset);
-        return value->IsTheHole() ? Heap::undefined_value() : value;
+        return receiver->FastPropertyAt(offset);
       }
     } else {
       // Attempt dictionary lookup.
@@ -2615,10 +2613,10 @@
       if ((entry != StringDictionary::kNotFound) &&
           (dictionary->DetailsAt(entry).type() == NORMAL)) {
         Object* value = dictionary->ValueAt(entry);
-        if (receiver->IsGlobalObject()) {
-           value = JSGlobalPropertyCell::cast(value)->value();
-        }
-        return value;
+        if (!receiver->IsGlobalObject()) return value;
+        value = JSGlobalPropertyCell::cast(value)->value();
+        if (!value->IsTheHole()) return value;
+        // If the value is the hole, do the general lookup.
       }
     }
   }
@@ -4155,16 +4153,21 @@
   }
 
   CONVERT_DOUBLE_CHECKED(y, args[1]);
-  if (y == 0.5) {
-    // It's not uncommon to use Math.pow(x, 0.5) to compute the square
-    // root of a number. To speed up such computations, we explictly
-    // check for this case and use the sqrt() function which is faster
-    // than pow().
-    return Heap::AllocateHeapNumber(sqrt(x));
-  } else if (y == -0.5) {
-    // Optimized using Math.pow(x, -0.5) == 1 / Math.pow(x, 0.5).
-    return Heap::AllocateHeapNumber(1.0 / sqrt(x));
-  } else if (y == 0) {
+
+  if (!isinf(x)) {
+    if (y == 0.5) {
+      // It's not uncommon to use Math.pow(x, 0.5) to compute the
+      // square root of a number. To speed up such computations, we
+      // explicitly check for this case and use the sqrt() function
+      // which is faster than pow().
+      return Heap::AllocateHeapNumber(sqrt(x));
+    } else if (y == -0.5) {
+      // Optimized using Math.pow(x, -0.5) == 1 / Math.pow(x, 0.5).
+      return Heap::AllocateHeapNumber(1.0 / sqrt(x));
+    }
+  }
+
+  if (y == 0) {
     return Smi::FromInt(1);
   } else if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) {
     return Heap::nan_value();
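
For orientation, here is the shape of the new Math.pow fast path rewritten as a standalone function outside V8's heap and runtime machinery (the name FastPow and the plain-double interface are invented for illustration). The isinf() guard matters because, for example, pow(-infinity, 0.5) is +infinity while sqrt(-infinity) is NaN, so the sqrt shortcut is only safe for finite bases:

    #include <cmath>
    #include <cstdio>

    // Illustrative only: mirrors the control flow of the hunk above.
    static double FastPow(double x, double y) {
      if (!std::isinf(x)) {
        if (y == 0.5) return std::sqrt(x);         // common square-root idiom
        if (y == -0.5) return 1.0 / std::sqrt(x);  // reciprocal square root
      }
      if (y == 0) return 1.0;
      return std::pow(x, y);  // everything else takes the general path
    }

    int main() {
      std::printf("%g %g\n", FastPow(9.0, 0.5), FastPow(-INFINITY, 0.5));
      return 0;
    }
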
diff --git a/src/scopes.cc b/src/scopes.cc
index 88b1c66..78ed035 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -71,28 +71,28 @@
 
 
 // Dummy constructor
-LocalsMap::LocalsMap(bool gotta_love_static_overloading) : HashMap()  {}
+VariableMap::VariableMap(bool gotta_love_static_overloading) : HashMap() {}
 
-LocalsMap::LocalsMap() : HashMap(Match, &LocalsMapAllocator, 8)  {}
-LocalsMap::~LocalsMap()  {}
+VariableMap::VariableMap() : HashMap(Match, &LocalsMapAllocator, 8) {}
+VariableMap::~VariableMap() {}
 
 
-Variable* LocalsMap::Declare(Scope* scope,
-                             Handle<String> name,
-                             Variable::Mode mode,
-                             bool is_valid_LHS,
-                             Variable::Kind kind) {
+Variable* VariableMap::Declare(Scope* scope,
+                               Handle<String> name,
+                               Variable::Mode mode,
+                               bool is_valid_lhs,
+                               Variable::Kind kind) {
   HashMap::Entry* p = HashMap::Lookup(name.location(), name->Hash(), true);
   if (p->value == NULL) {
     // The variable has not been declared yet -> insert it.
     ASSERT(p->key == name.location());
-    p->value = new Variable(scope, name, mode, is_valid_LHS, kind);
+    p->value = new Variable(scope, name, mode, is_valid_lhs, kind);
   }
   return reinterpret_cast<Variable*>(p->value);
 }
 
 
-Variable* LocalsMap::Lookup(Handle<String> name) {
+Variable* VariableMap::Lookup(Handle<String> name) {
   HashMap::Entry* p = HashMap::Lookup(name.location(), name->Hash(), false);
   if (p != NULL) {
     ASSERT(*reinterpret_cast<String**>(p->key) == *name);
@@ -110,7 +110,7 @@
 // Dummy constructor
 Scope::Scope()
   : inner_scopes_(0),
-    locals_(false),
+    variables_(false),
     temps_(0),
     params_(0),
     dynamics_(NULL),
@@ -168,27 +168,26 @@
   // instead load them directly from the stack. Currently, the only
   // such parameter is 'this' which is passed on the stack when
   // invoking scripts
-  { Variable* var =
-        locals_.Declare(this, Factory::this_symbol(), Variable::VAR,
-                        false, Variable::THIS);
-    var->rewrite_ = new Slot(var, Slot::PARAMETER, -1);
-    receiver_ = new VariableProxy(Factory::this_symbol(), true, false);
-    receiver_->BindTo(var);
-  }
+  Variable* var =
+      variables_.Declare(this, Factory::this_symbol(), Variable::VAR,
+                         false, Variable::THIS);
+  var->rewrite_ = new Slot(var, Slot::PARAMETER, -1);
+  receiver_ = new VariableProxy(Factory::this_symbol(), true, false);
+  receiver_->BindTo(var);
 
   if (is_function_scope()) {
     // Declare 'arguments' variable which exists in all functions.
-    // Note that it may never be accessed, in which case it won't
-    // be allocated during variable allocation.
-    locals_.Declare(this, Factory::arguments_symbol(), Variable::VAR,
-                    true, Variable::ARGUMENTS);
+    // Note that it might never be accessed, in which case it won't be
+    // allocated during variable allocation.
+    variables_.Declare(this, Factory::arguments_symbol(), Variable::VAR,
+                       true, Variable::ARGUMENTS);
   }
 }
 
 
 
-Variable* Scope::LookupLocal(Handle<String> name) {
-  return locals_.Lookup(name);
+Variable* Scope::LocalLookup(Handle<String> name) {
+  return variables_.Lookup(name);
 }
 
 
@@ -196,7 +195,7 @@
   for (Scope* scope = this;
        scope != NULL;
        scope = scope->outer_scope()) {
-    Variable* var = scope->LookupLocal(name);
+    Variable* var = scope->LocalLookup(name);
     if (var != NULL) return var;
   }
   return NULL;
@@ -210,18 +209,25 @@
 }
 
 
-Variable* Scope::Declare(Handle<String> name, Variable::Mode mode) {
+Variable* Scope::DeclareLocal(Handle<String> name, Variable::Mode mode) {
   // DYNAMIC variables are introduced during variable allocation,
   // INTERNAL variables are allocated explicitly, and TEMPORARY
   // variables are allocated via NewTemporary().
   ASSERT(mode == Variable::VAR || mode == Variable::CONST);
-  return locals_.Declare(this, name, mode, true, Variable::NORMAL);
+  return variables_.Declare(this, name, mode, true, Variable::NORMAL);
+}
+
+
+Variable* Scope::DeclareGlobal(Handle<String> name) {
+  ASSERT(is_global_scope());
+  return variables_.Declare(this, name, Variable::DYNAMIC, true,
+                            Variable::NORMAL);
 }
 
 
 void Scope::AddParameter(Variable* var) {
   ASSERT(is_function_scope());
-  ASSERT(LookupLocal(var->name()) == var);
+  ASSERT(LocalLookup(var->name()) == var);
   params_.Add(var);
 }
 
@@ -291,7 +297,9 @@
       locals->Add(var);
     }
   }
-  for (LocalsMap::Entry* p = locals_.Start(); p != NULL; p = locals_.Next(p)) {
+  for (VariableMap::Entry* p = variables_.Start();
+       p != NULL;
+       p = variables_.Next(p)) {
     Variable* var = reinterpret_cast<Variable*>(p->value);
     if (var->var_uses()->is_used()) {
       locals->Add(var);
@@ -410,8 +418,8 @@
 }
 
 
-static void PrintMap(PrettyPrinter* printer, int indent, LocalsMap* map) {
-  for (LocalsMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
+static void PrintMap(PrettyPrinter* printer, int indent, VariableMap* map) {
+  for (VariableMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
     Variable* var = reinterpret_cast<Variable*>(p->value);
     PrintVar(printer, indent, var);
   }
@@ -478,7 +486,7 @@
   }
 
   Indent(n1, "// local vars\n");
-  PrintMap(&printer, n1, &locals_);
+  PrintMap(&printer, n1, &variables_);
 
   Indent(n1, "// dynamic vars\n");
   if (dynamics_ != NULL) {
@@ -502,7 +510,7 @@
 
 Variable* Scope::NonLocal(Handle<String> name, Variable::Mode mode) {
   if (dynamics_ == NULL) dynamics_ = new DynamicScopePart();
-  LocalsMap* map = dynamics_->GetMap(mode);
+  VariableMap* map = dynamics_->GetMap(mode);
   Variable* var = map->Lookup(name);
   if (var == NULL) {
     // Declare a new non-local.
@@ -530,7 +538,7 @@
   bool guess = scope_calls_eval_;
 
   // Try to find the variable in this scope.
-  Variable* var = LookupLocal(name);
+  Variable* var = LocalLookup(name);
 
   if (var != NULL) {
     // We found a variable. If this is not an inner lookup, we are done.
@@ -621,8 +629,7 @@
             scope_calls_eval_ || outer_scope_calls_eval_)) {
         // We must have a global variable.
         ASSERT(global_scope != NULL);
-        var = new Variable(global_scope, proxy->name(),
-                           Variable::DYNAMIC, true, Variable::NORMAL);
+        var = global_scope->DeclareGlobal(proxy->name());
 
       } else if (scope_inside_with_) {
         // If we are inside a with statement we give up and look up
@@ -706,26 +713,26 @@
 
 
 bool Scope::MustAllocate(Variable* var) {
-  // Give var a read/write use if there is a chance it might be
-  // accessed via an eval() call, or if it is a global variable.
-  // This is only possible if the variable has a visible name.
+  // Give var a read/write use if there is a chance it might be accessed
+  // via an eval() call.  This is only possible if the variable has a
+  // visible name.
   if ((var->is_this() || var->name()->length() > 0) &&
       (var->is_accessed_from_inner_scope_ ||
        scope_calls_eval_ || inner_scope_calls_eval_ ||
-       scope_contains_with_ || var->is_global())) {
+       scope_contains_with_)) {
     var->var_uses()->RecordAccess(1);
   }
-  return var->var_uses()->is_used();
+  // Global variables do not need to be allocated.
+  return !var->is_global() && var->var_uses()->is_used();
 }
 
 
 bool Scope::MustAllocateInContext(Variable* var) {
   // If var is accessed from an inner scope, or if there is a
-  // possibility that it might be accessed from the current or
-  // an inner scope (through an eval() call), it must be allocated
-  // in the context.
-  // Exceptions: Global variables and temporary variables must
-  // never be allocated in the (FixedArray part of the) context.
+  // possibility that it might be accessed from the current or an inner
+  // scope (through an eval() call), it must be allocated in the
+  // context.  Exception: temporary variables are not allocated in the
+  // context.
   return
     var->mode() != Variable::TEMPORARY &&
     (var->is_accessed_from_inner_scope_ ||
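
Read as a predicate, the new MustAllocate says: a variable that might be reached dynamically (from an inner scope, eval, or with) counts as used, but an implicit global never receives a slot. A side-effect-free paraphrase with simplified stand-in fields, not V8's Variable API:

    struct VarInfo {
      bool is_this;
      bool has_visible_name;
      bool accessed_from_inner_scope;
      bool is_global;
      bool is_used;
    };

    // Mirrors the logic above without the RecordAccess() side effect.
    bool MustAllocate(const VarInfo& v,
                      bool scope_calls_eval,
                      bool inner_scope_calls_eval,
                      bool scope_contains_with) {
      bool maybe_reached_dynamically =
          (v.is_this || v.has_visible_name) &&
          (v.accessed_from_inner_scope || scope_calls_eval ||
           inner_scope_calls_eval || scope_contains_with);
      return !v.is_global && (v.is_used || maybe_reached_dynamically);
    }
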
@@ -755,7 +762,7 @@
 
 void Scope::AllocateParameterLocals() {
   ASSERT(is_function_scope());
-  Variable* arguments = LookupLocal(Factory::arguments_symbol());
+  Variable* arguments = LocalLookup(Factory::arguments_symbol());
   ASSERT(arguments != NULL);  // functions have 'arguments' declared implicitly
   if (MustAllocate(arguments) && !HasArgumentsParameter()) {
     // 'arguments' is used. Unless there is also a parameter called
@@ -865,7 +872,7 @@
   ASSERT(var->rewrite_ == NULL ||
          (!var->IsVariable(Factory::result_symbol())) ||
          (var->slot() == NULL || var->slot()->type() != Slot::LOCAL));
-  if (MustAllocate(var) && var->rewrite_ == NULL) {
+  if (var->rewrite_ == NULL && MustAllocate(var)) {
     if (MustAllocateInContext(var)) {
       AllocateHeapSlot(var);
     } else {
@@ -876,27 +883,21 @@
 
 
 void Scope::AllocateNonParameterLocals() {
-  // Each variable occurs exactly once in the locals_ list; all
-  // variables that have no rewrite yet are non-parameter locals.
-
-  // Sort them according to use such that the locals with more uses
-  // get allocated first.
-  if (FLAG_usage_computation) {
-    // This is currently not implemented.
-  }
-
+  // All variables that have no rewrite yet are non-parameter locals.
   for (int i = 0; i < temps_.length(); i++) {
     AllocateNonParameterLocal(temps_[i]);
   }
 
-  for (LocalsMap::Entry* p = locals_.Start(); p != NULL; p = locals_.Next(p)) {
+  for (VariableMap::Entry* p = variables_.Start();
+       p != NULL;
+       p = variables_.Next(p)) {
     Variable* var = reinterpret_cast<Variable*>(p->value);
     AllocateNonParameterLocal(var);
   }
 
-  // Note: For now, function_ must be allocated at the very end.  If
-  // it gets allocated in the context, it must be the last slot in the
-  // context, because of the current ScopeInfo implementation (see
+  // For now, function_ must be allocated at the very end.  If it gets
+  // allocated in the context, it must be the last slot in the context,
+  // because of the current ScopeInfo implementation (see
   // ScopeInfo::ScopeInfo(FunctionScope* scope) constructor).
   if (function_ != NULL) {
     AllocateNonParameterLocal(function_);
diff --git a/src/scopes.h b/src/scopes.h
index ea4e0f7..5767d9f 100644
--- a/src/scopes.h
+++ b/src/scopes.h
@@ -35,19 +35,22 @@
 namespace internal {
 
 
-// A hash map to support fast local variable declaration and lookup.
-class LocalsMap: public HashMap {
+// A hash map to support fast variable declaration and lookup.
+class VariableMap: public HashMap {
  public:
-  LocalsMap();
+  VariableMap();
 
   // Dummy constructor.  This constructor doesn't set up the map
   // properly so don't use it unless you have a good reason.
-  explicit LocalsMap(bool gotta_love_static_overloading);
+  explicit VariableMap(bool gotta_love_static_overloading);
 
-  virtual ~LocalsMap();
+  virtual ~VariableMap();
 
-  Variable* Declare(Scope* scope, Handle<String> name, Variable::Mode mode,
-                    bool is_valid_LHS, Variable::Kind kind);
+  Variable* Declare(Scope* scope,
+                    Handle<String> name,
+                    Variable::Mode mode,
+                    bool is_valid_lhs,
+                    Variable::Kind kind);
 
   Variable* Lookup(Handle<String> name);
 };
@@ -59,14 +62,14 @@
 // and setup time for scopes that don't need them.
 class DynamicScopePart : public ZoneObject {
  public:
-  LocalsMap* GetMap(Variable::Mode mode) {
+  VariableMap* GetMap(Variable::Mode mode) {
     int index = mode - Variable::DYNAMIC;
     ASSERT(index >= 0 && index < 3);
     return &maps_[index];
   }
 
  private:
-  LocalsMap maps_[3];
+  VariableMap maps_[3];
 };
 
 
@@ -105,7 +108,7 @@
   // Declarations
 
   // Lookup a variable in this scope. Returns the variable or NULL if not found.
-  virtual Variable* LookupLocal(Handle<String> name);
+  virtual Variable* LocalLookup(Handle<String> name);
 
   // Lookup a variable in this scope or outer scopes.
   // Returns the variable or NULL if not found.
@@ -116,9 +119,15 @@
   // outer scope. Only possible for function scopes; at most one variable.
   Variable* DeclareFunctionVar(Handle<String> name);
 
-  // Declare a variable in this scope. If the variable has been
+  // Declare a local variable in this scope. If the variable has been
   // declared before, the previously declared variable is returned.
-  virtual Variable* Declare(Handle<String> name, Variable::Mode mode);
+  virtual Variable* DeclareLocal(Handle<String> name, Variable::Mode mode);
+
+  // Declare an implicit global variable in this scope (which must be the
+  // global scope).  The variable was introduced (possibly from an inner
+  // scope) by a reference to an unresolved variable with no intervening
+  // with statements or eval calls.
+  Variable* DeclareGlobal(Handle<String> name);
 
   // Add a parameter to the parameter list. The parameter must have been
   // declared via Declare. The same parameter may occur more than once in
@@ -288,25 +297,28 @@
   Handle<String> scope_name_;
 
   // The variables declared in this scope:
-  // all user-declared variables (incl. parameters)
-  LocalsMap locals_;
-  // compiler-allocated (user-invisible) temporaries
+  //
+  // All user-declared variables (incl. parameters).  For global scopes,
+  // variables may be implicitly 'declared' by being used (possibly in
+  // an inner scope) with no intervening with statements or eval calls.
+  VariableMap variables_;
+  // Compiler-allocated (user-invisible) temporaries.
   ZoneList<Variable*> temps_;
-  // parameter list in source order
+  // Parameter list in source order.
   ZoneList<Variable*> params_;
-  // variables that must be looked up dynamically
+  // Variables that must be looked up dynamically.
   DynamicScopePart* dynamics_;
-  // unresolved variables referred to from this scope
+  // Unresolved variables referred to from this scope.
   ZoneList<VariableProxy*> unresolved_;
-  // declarations
+  // Declarations.
   ZoneList<Declaration*> decls_;
-  // convenience variable
+  // Convenience variable.
   VariableProxy* receiver_;
-  // function variable, if any; function scopes only
+  // Function variable, if any; function scopes only.
   Variable* function_;
-  // convenience variable; function scopes only
+  // Convenience variable; function scopes only.
   VariableProxy* arguments_;
-  // convenience variable; function scopes only
+  // Convenience variable; function scopes only.
   VariableProxy* arguments_shadow_;
 
   // Illegal redeclaration.
diff --git a/src/spaces.cc b/src/spaces.cc
index 077bcab..3f3a635 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1141,7 +1141,7 @@
   // Summarize string types.
   int string_number = 0;
   int string_bytes = 0;
-#define INCREMENT(type, size, name)                  \
+#define INCREMENT(type, size, name, camel_name)      \
     string_number += heap_histograms[type].number(); \
     string_bytes += heap_histograms[type].bytes();
   STRING_TYPE_LIST(INCREMENT)
@@ -1185,8 +1185,8 @@
   // Lump all the string types together.
   int string_number = 0;
   int string_bytes = 0;
-#define INCREMENT(type, size, name)       \
-    string_number += info[type].number(); \
+#define INCREMENT(type, size, name, camel_name)       \
+    string_number += info[type].number();             \
     string_bytes += info[type].bytes();
   STRING_TYPE_LIST(INCREMENT)
 #undef INCREMENT
@@ -1266,12 +1266,12 @@
   // field and a next pointer, we give it a filler map that gives it the
   // correct size.
   if (size_in_bytes > ByteArray::kHeaderSize) {
-    set_map(Heap::byte_array_map());
+    set_map(Heap::raw_unchecked_byte_array_map());
     ByteArray::cast(this)->set_length(ByteArray::LengthFor(size_in_bytes));
   } else if (size_in_bytes == kPointerSize) {
-    set_map(Heap::one_word_filler_map());
+    set_map(Heap::raw_unchecked_one_word_filler_map());
   } else if (size_in_bytes == 2 * kPointerSize) {
-    set_map(Heap::two_word_filler_map());
+    set_map(Heap::raw_unchecked_two_word_filler_map());
   } else {
     UNREACHABLE();
   }
@@ -1280,14 +1280,14 @@
 
 
 Address FreeListNode::next() {
-  ASSERT(map() == Heap::byte_array_map());
+  ASSERT(map() == Heap::raw_unchecked_byte_array_map());
   ASSERT(Size() >= kNextOffset + kPointerSize);
   return Memory::Address_at(address() + kNextOffset);
 }
 
 
 void FreeListNode::set_next(Address next) {
-  ASSERT(map() == Heap::byte_array_map());
+  ASSERT(map() == Heap::raw_unchecked_byte_array_map());
   ASSERT(Size() >= kNextOffset + kPointerSize);
   Memory::Address_at(address() + kNextOffset) = next;
 }
@@ -1856,7 +1856,7 @@
             int bitpos = intoff*kBitsPerByte + bitoff;
             Address slot = p->OffsetToAddress(bitpos << kObjectAlignmentBits);
             Object** obj = reinterpret_cast<Object**>(slot);
-            if (*obj == Heap::fixed_array_map()) {
+            if (*obj == Heap::raw_unchecked_fixed_array_map()) {
               rset_marked_arrays++;
               FixedArray* fa = FixedArray::cast(HeapObject::FromAddress(slot));
 
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 49b20e2..8b3822a 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -562,10 +562,11 @@
 
 
 static Object* GetProbeValue(Code::Flags flags) {
-  NumberDictionary* dictionary = Heap::non_monomorphic_cache();
+  // Use raw_unchecked... so we don't get assert failures during GC.
+  NumberDictionary* dictionary = Heap::raw_unchecked_non_monomorphic_cache();
   int entry = dictionary->FindEntry(flags);
   if (entry != -1) return dictionary->ValueAt(entry);
-  return Heap::undefined_value();
+  return Heap::raw_unchecked_undefined_value();
 }
 
 
@@ -579,7 +580,7 @@
       Heap::non_monomorphic_cache()->AtNumberPut(flags,
                                                  Heap::undefined_value());
   if (result->IsFailure()) return result;
-  Heap::set_non_monomorphic_cache(NumberDictionary::cast(result));
+  Heap::public_set_non_monomorphic_cache(NumberDictionary::cast(result));
   return probe;
 }
 
diff --git a/src/variables.h b/src/variables.h
index c0d1435..3f1f11b 100644
--- a/src/variables.h
+++ b/src/variables.h
@@ -143,6 +143,12 @@
     ARGUMENTS
   };
 
+  Variable(Scope* scope,
+           Handle<String> name,
+           Mode mode,
+           bool is_valid_lhs,
+           Kind kind);
+
   // Printing support
   static const char* Mode2String(Mode mode);
 
@@ -196,9 +202,6 @@
   SmiAnalysis* type() { return &type_; }
 
  private:
-  Variable(Scope* scope, Handle<String> name, Mode mode, bool is_valid_LHS,
-           Kind kind);
-
   Scope* scope_;
   Handle<String> name_;
   Mode mode_;
@@ -216,13 +219,10 @@
   SmiAnalysis type_;
 
   // Code generation.
-  // rewrite_ is usually a Slot or a Property, but maybe any expression.
+  // rewrite_ is usually a Slot or a Property, but may be any expression.
   Expression* rewrite_;
 
-  friend class VariableProxy;
-  friend class Scope;
-  friend class LocalsMap;
-  friend class AstBuildingParser;
+  friend class Scope;  // Has explicit access to rewrite_.
 };
 
 
diff --git a/src/version.cc b/src/version.cc
index 7e009fb..9a65949 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     1
 #define MINOR_VERSION     2
-#define BUILD_NUMBER      12
+#define BUILD_NUMBER      13
 #define PATCH_LEVEL       0
 #define CANDIDATE_VERSION false
 
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 2ccfd15..167334f 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -427,6 +427,17 @@
 }
 
 
+void Assembler::arithmetic_op_32(byte opcode,
+                                 const Operand& dst,
+                                 Register src) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  emit_optional_rex_32(src, dst);
+  emit(opcode);
+  emit_operand(src, dst);
+}
+
+
 void Assembler::immediate_arithmetic_op(byte subcode,
                                         Register dst,
                                         Immediate src) {
@@ -1068,6 +1079,19 @@
 
 
 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
+  // Non-relocatable values might not need a 64-bit representation.
+  if (rmode == RelocInfo::NONE) {
+    // Sadly, there is no zero- or sign-extending move for 8-bit immediates.
+    if (is_int32(value)) {
+      movq(dst, Immediate(static_cast<int32_t>(value)));
+      return;
+    } else if (is_uint32(value)) {
+      movl(dst, Immediate(static_cast<int32_t>(value)));
+      return;
+    }
+    // Value cannot be represented in 32 bits, so do a full 64-bit immediate
+    // value.
+  }
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_rex_64(dst);
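
The movq change above picks the shortest usable encoding when there is no relocation info: a sign-extending 64-bit move for values that fit in int32, a plain 32-bit movl (which zero-extends on x64) for values that fit in uint32, and the full 64-bit immediate otherwise. A standalone sketch of that dispatch; the helper names and the quoted byte counts (for the rax..rdi case) are for orientation only:

    #include <cstdint>
    #include <cstdio>

    static bool is_int32(int64_t v)  { return v == static_cast<int32_t>(v); }
    static bool is_uint32(int64_t v) { return v >= 0 && v <= 0xFFFFFFFFLL; }

    static const char* PickMoveEncoding(int64_t value) {
      if (is_int32(value))  return "REX.W C7 /0 imm32 (sign-extended, ~7 bytes)";
      if (is_uint32(value)) return "B8+rd imm32 via movl (zero-extended, ~5 bytes)";
      return "REX.W B8+rd imm64 (full 10-byte move)";
    }

    int main() {
      std::printf("%s\n", PickMoveEncoding(-1));             // fits in int32
      std::printf("%s\n", PickMoveEncoding(0x80000000LL));   // fits only in uint32
      std::printf("%s\n", PickMoveEncoding(0x123456789LL));  // needs all 64 bits
      return 0;
    }
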
@@ -1097,16 +1121,24 @@
 
 
 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
-  EnsureSpace ensure_space(this);
-  last_pc_ = pc_;
-  ASSERT(!Heap::InNewSpace(*value));
-  emit_rex_64(dst);
-  emit(0xB8 | dst.low_bits());
-  if (value->IsHeapObject()) {
-    emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
+  // If there is no relocation info, emit the value of the handle efficiently
+  // (possibly using less than 8 bytes for the value).
+  if (mode == RelocInfo::NONE) {
+    // There is no possible reason to store a heap pointer without relocation
+    // info, so it must be a smi.
+    ASSERT(value->IsSmi());
+    // Smis never have more than 32 significant bits, but they might
+    // have garbage in the high bits.
+    movq(dst,
+         Immediate(static_cast<int32_t>(reinterpret_cast<intptr_t>(*value))));
   } else {
-    ASSERT_EQ(RelocInfo::NONE, mode);
-    emitq(reinterpret_cast<uintptr_t>(*value), RelocInfo::NONE);
+    EnsureSpace ensure_space(this);
+    last_pc_ = pc_;
+    ASSERT(value->IsHeapObject());
+    ASSERT(!Heap::InNewSpace(*value));
+    emit_rex_64(dst);
+    emit(0xB8 | dst.low_bits());
+    emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
   }
 }
 
@@ -1449,7 +1481,7 @@
   last_pc_ = pc_;
   if (reg.is(rax)) {
     emit(0xA8);
-    emit(mask);
+    emit(mask.value_);  // Low byte emitted.
   } else {
     if (reg.code() > 3) {
       // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
@@ -1473,6 +1505,15 @@
 }
 
 
+void Assembler::testl(Register dst, Register src) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  emit_optional_rex_32(dst, src);
+  emit(0x85);
+  emit_modrm(dst, src);
+}
+
+
 void Assembler::testl(Register reg, Immediate mask) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index d99401b..7e30934 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -562,6 +562,26 @@
     immediate_arithmetic_op_8(0x7, dst, src);
   }
 
+  void cmpl(Register dst, Register src) {
+    arithmetic_op_32(0x3B, dst, src);
+  }
+
+  void cmpl(Register dst, const Operand& src) {
+    arithmetic_op_32(0x3B, src, dst);
+  }
+
+  void cmpl(const Operand& dst, Register src) {
+    arithmetic_op_32(0x39, dst, src);
+  }
+
+  void cmpl(Register dst, Immediate src) {
+    immediate_arithmetic_op_32(0x7, dst, src);
+  }
+
+  void cmpl(const Operand& dst, Immediate src) {
+    immediate_arithmetic_op_32(0x7, dst, src);
+  }
+
   void cmpq(Register dst, Register src) {
     arithmetic_op(0x3B, dst, src);
   }
@@ -578,10 +598,6 @@
     immediate_arithmetic_op(0x7, dst, src);
   }
 
-  void cmpl(Register dst, Immediate src) {
-    immediate_arithmetic_op_32(0x7, dst, src);
-  }
-
   void cmpq(const Operand& dst, Immediate src) {
     immediate_arithmetic_op(0x7, dst, src);
   }
@@ -740,6 +756,7 @@
 
   void testb(Register reg, Immediate mask);
   void testb(const Operand& op, Immediate mask);
+  void testl(Register dst, Register src);
   void testl(Register reg, Immediate mask);
   void testl(const Operand& op, Immediate mask);
   void testq(const Operand& op, Register reg);
@@ -1086,6 +1103,7 @@
   // ModR/M byte.
   void arithmetic_op(byte opcode, Register dst, Register src);
   void arithmetic_op_32(byte opcode, Register dst, Register src);
+  void arithmetic_op_32(byte opcode, const Operand& dst, Register src);
   void arithmetic_op(byte opcode, Register reg, const Operand& op);
   void immediate_arithmetic_op(byte subcode, Register dst, Immediate src);
   void immediate_arithmetic_op(byte subcode, const Operand& dst, Immediate src);
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 54138a2..b1f2b8f 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -366,15 +366,6 @@
 }
 
 
-void CodeGenerator::GenerateFastCaseSwitchJumpTable(SwitchStatement* a,
-                                                    int b,
-                                                    int c,
-                                                    Label* d,
-                                                    Vector<Label*> e,
-                                                    Vector<Label> f) {
-  UNIMPLEMENTED();
-}
-
 #ifdef DEBUG
 bool CodeGenerator::HasValidEntryRegisters() {
   return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
@@ -1276,7 +1267,7 @@
 
   frame_->EmitPush(rax);  // <- slot 3
   frame_->EmitPush(rdx);  // <- slot 2
-  __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
+  __ movsxlq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
   __ shl(rax, Immediate(kSmiTagSize));
   frame_->EmitPush(rax);  // <- slot 1
   frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
@@ -1288,7 +1279,7 @@
   frame_->EmitPush(rax);  // <- slot 2
 
   // Push the length of the array and the initial index onto the stack.
-  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
+  __ movsxlq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
   __ shl(rax, Immediate(kSmiTagSize));
   frame_->EmitPush(rax);  // <- slot 1
   frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
@@ -1308,8 +1299,7 @@
   __ movq(rdx, frame_->ElementAt(2));
   ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   // Multiplier is times_4 since rax is already a Smi.
-  __ movq(rbx, Operand(rdx, rax, times_4,
-                       FixedArray::kHeaderSize - kHeapObjectTag));
+  __ movq(rbx, FieldOperand(rdx, rax, times_4, FixedArray::kHeaderSize));
 
   // Get the expected map from the stack or a zero map in the
   // permanent slow case rax: current iteration count rbx: i'th entry
@@ -2459,13 +2449,13 @@
   // receiver.  Use a scratch register to avoid destroying the result.
   Result scratch = allocator_->Allocate();
   ASSERT(scratch.is_valid());
-  __ movl(scratch.reg(),
+  __ movq(scratch.reg(),
           FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(0)));
   frame_->SetElementAt(arg_count + 1, &scratch);
 
   // We can reuse the result register now.
   frame_->Spill(result.reg());
-  __ movl(result.reg(),
+  __ movq(result.reg(),
           FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(1)));
   frame_->SetElementAt(arg_count, &result);
 
@@ -3144,11 +3134,9 @@
       __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
                Immediate(1 << Map::kIsUndetectable));
       destination()->false_target()->Branch(not_zero);
-      __ movb(kScratchRegister,
-              FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
-      __ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
+      __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
       destination()->false_target()->Branch(below);
-      __ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
+      __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
       answer.Unuse();
       destination()->Split(below_equal);
     } else {
@@ -3246,10 +3234,25 @@
 
 
 void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
-  // TODO(X64): Optimize this like it's done on IA-32.
   ASSERT(args->length() == 0);
-  Result answer = frame_->CallRuntime(Runtime::kIsConstructCall, 0);
-  frame_->Push(&answer);
+
+  // Get the frame pointer for the calling frame.
+  Result fp = allocator()->Allocate();
+  __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+
+  // Skip the arguments adaptor frame if it exists.
+  Label check_frame_marker;
+  __ cmpq(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+          Immediate(ArgumentsAdaptorFrame::SENTINEL));
+  __ j(not_equal, &check_frame_marker);
+  __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
+
+  // Check the marker in the calling frame.
+  __ bind(&check_frame_marker);
+  __ cmpq(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
+          Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
+  fp.Unuse();
+  destination()->Split(equal);
 }
 
 
@@ -3361,7 +3364,21 @@
 
 
 void CodeGenerator::GenerateFastMathOp(MathOp op, ZoneList<Expression*>* args) {
-  UNIMPLEMENTED();
+  // TODO(X64): Use inline floating point in the fast case.
+  ASSERT(args->length() == 1);
+
+  // Load number.
+  Load(args->at(0));
+  Result answer;
+  switch (op) {
+    case SIN:
+      answer = frame_->CallRuntime(Runtime::kMath_sin, 1);
+      break;
+    case COS:
+      answer = frame_->CallRuntime(Runtime::kMath_cos, 1);
+      break;
+  }
+  frame_->Push(&answer);
 }
 
 
@@ -3379,27 +3396,22 @@
 
   // Check that the object is a JS object but take special care of JS
   // functions to make sure they have 'Function' as their class.
-  { Result tmp = allocator()->Allocate();
-    __ movq(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
-    __ movb(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
-    __ cmpb(tmp.reg(), Immediate(FIRST_JS_OBJECT_TYPE));
-    null.Branch(less);
 
-    // As long as JS_FUNCTION_TYPE is the last instance type and it is
-    // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
-    // LAST_JS_OBJECT_TYPE.
-    ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
-    ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
-    __ cmpb(tmp.reg(), Immediate(JS_FUNCTION_TYPE));
-    function.Branch(equal);
-  }
+  __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
+  null.Branch(less);
+
+  // As long as JS_FUNCTION_TYPE is the last instance type and it is
+  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
+  // LAST_JS_OBJECT_TYPE.
+  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
+  __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
+  function.Branch(equal);
 
   // Check if the constructor in the map is a function.
-  { Result tmp = allocator()->Allocate();
-    __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
-    __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
-    non_function_constructor.Branch(not_equal);
-  }
+  __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
+  __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
+  non_function_constructor.Branch(not_equal);
 
   // The obj register now contains the constructor function. Grab the
   // instance class name from there.
@@ -3803,8 +3815,28 @@
 Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                          Result tmp,
                                                          JumpTarget* slow) {
-  UNIMPLEMENTED();
-  return Operand(rsp, 0);
+  ASSERT(slot->type() == Slot::CONTEXT);
+  ASSERT(tmp.is_register());
+  Register context = rsi;
+
+  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
+                Immediate(0));
+        slow->Branch(not_equal, not_taken);
+      }
+      __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
+      __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
+      context = tmp.reg();
+    }
+  }
+  // Check that last extension is NULL.
+  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
+  slow->Branch(not_equal, not_taken);
+  __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
+  return ContextOperand(tmp.reg(), slot->index());
 }
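
The new ContextSlotOperandCheckExtensions body walks the context chain from the current scope to the scope that owns the slot, and bails to the slow path whenever a context might carry an eval-installed extension object. Roughly, in plain C++ (the Context struct is a stand-in, and the per-scope calls_eval refinement is omitted):

    struct Context {
      Context* previous;  // enclosing context
      void* extension;    // non-null if eval() introduced dynamic bindings
    };

    // Returns the context holding the slot, or nullptr to signal "slow path".
    Context* FindContextCheckingExtensions(Context* current, int hops) {
      for (int i = 0; i < hops; ++i) {
        if (current->extension != nullptr) return nullptr;
        current = current->previous;
      }
      if (current->extension != nullptr) return nullptr;
      return current;
    }
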
 
 
@@ -4316,12 +4348,8 @@
       left_side = Result(left_reg);
       right_side = Result(right_val);
       // Test smi equality and comparison by signed int comparison.
-      if (IsUnsafeSmi(right_side.handle())) {
-        right_side.ToRegister();
-        __ cmpq(left_side.reg(), right_side.reg());
-      } else {
-        __ Cmp(left_side.reg(), right_side.handle());
-      }
+      // Both sides are smis, so we can use an Immediate.
+      __ cmpl(left_side.reg(), Immediate(Smi::cast(*right_side.handle())));
       left_side.Unuse();
       right_side.Unuse();
       dest->Split(cc);
@@ -4373,7 +4401,8 @@
       // When non-smi, call out to the compare stub.
       CompareStub stub(cc, strict);
       Result answer = frame_->CallStub(&stub, &left_side, &right_side);
-      __ testq(answer.reg(), answer.reg());  // Both zero and sign flag right.
+      // The result is a Smi, which is negative, zero, or positive.
+      __ testl(answer.reg(), answer.reg());  // Both zero and sign flag right.
       answer.Unuse();
       dest->Split(cc);
     } else {
@@ -4393,11 +4422,7 @@
       // When non-smi, call out to the compare stub.
       CompareStub stub(cc, strict);
       Result answer = frame_->CallStub(&stub, &left_side, &right_side);
-      if (cc == equal) {
-        __ testq(answer.reg(), answer.reg());
-      } else {
-        __ cmpq(answer.reg(), Immediate(0));
-      }
+      __ testl(answer.reg(), answer.reg());  // Sets both zero and sign flags.
       answer.Unuse();
       dest->true_target()->Branch(cc);
       dest->false_target()->Jump();
@@ -4405,7 +4430,7 @@
       is_smi.Bind();
       left_side = Result(left_reg);
       right_side = Result(right_reg);
-      __ cmpq(left_side.reg(), right_side.reg());
+      __ cmpl(left_side.reg(), right_side.reg());
       right_side.Unuse();
       left_side.Unuse();
       dest->Split(cc);
@@ -5421,6 +5446,7 @@
   __ j(equal, &false_result);
 
   // Get the map and type of the heap object.
+  // We don't use CmpObjectType because we manipulate the type field.
   __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
   __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
 
@@ -5446,6 +5472,7 @@
 
   __ bind(&not_string);
   // HeapNumber => false iff +0, -0, or NaN.
+  // Comparing any of these against zero in the FPU sets condition flag C3.
   __ Cmp(rdx, Factory::heap_number_map());
   __ j(not_equal, &true_result);
   // TODO(x64): Don't use fp stack, use MMX registers?
@@ -5455,9 +5482,9 @@
   __ fucompp();  // Compare and pop both values.
   __ movq(kScratchRegister, rax);
   __ fnstsw_ax();  // Store fp status word in ax, no checking for exceptions.
-  __ testb(rax, Immediate(0x08));  // Test FP condition flag C3.
+  __ testl(rax, Immediate(0x4000));  // Test FP condition flag C3, bit 14.
   __ movq(rax, kScratchRegister);
-  __ j(zero, &false_result);
+  __ j(not_zero, &false_result);
   // Fall through to |true_result|.
 
   // Return 1/0 for true/false in rax.
@@ -5617,7 +5644,7 @@
       // The representation of NaN values has all exponent bits (52..62) set,
       // and not all mantissa bits (0..51) clear.
       // Read double representation into rax.
-      __ movq(rbx, 0x7ff0000000000000, RelocInfo::NONE);
+      __ movq(rbx, V8_UINT64_C(0x7ff0000000000000), RelocInfo::NONE);
       __ movq(rax, FieldOperand(rdx, HeapNumber::kValueOffset));
       // Test that exponent bits are all set.
       __ or_(rbx, rax);
@@ -5627,7 +5654,8 @@
       __ shl(rax, Immediate(12));
       // If all bits in the mantissa are zero the number is Infinity, and
       // we return zero.  Otherwise it is a NaN, and we return non-zero.
-      // So just return rax.
+      // We cannot just return rax because only eax is tested on return.
+      __ setcc(not_zero, rax);
       __ ret(0);
 
       __ bind(&not_identical);
@@ -5665,7 +5693,7 @@
                Factory::heap_number_map());
         // If heap number, handle it in the slow case.
         __ j(equal, &slow);
-        // Return non-equal (ebx is not zero)
+        // Return non-equal.  ebx (the lower half of rbx) is not zero.
         __ movq(rax, rbx);
         __ ret(0);
 
@@ -5681,7 +5709,7 @@
       Label first_non_object;
       __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
       __ j(below, &first_non_object);
-      // Return non-zero (rax is not zero)
+      // Return non-zero: eax (the low 32 bits of rax) is not zero.
       Label return_not_equal;
       ASSERT(kHeapObjectTag != 0);
       __ bind(&return_not_equal);
@@ -5745,7 +5773,7 @@
     BranchIfNonSymbol(masm, &call_builtin, rdx, kScratchRegister);
 
     // We've already checked for object identity, so if both operands
-    // are symbols they aren't equal. Register rax already holds a
+    // are symbols they aren't equal. Register eax (not rax) already holds a
     // non-zero value, which indicates not equal, so just return.
     __ ret(2 * kPointerSize);
   }
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index 0e8505a..bb4b538 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -543,58 +543,6 @@
   inline void GenerateMathSin(ZoneList<Expression*>* args);
   inline void GenerateMathCos(ZoneList<Expression*>* args);
 
-  // Methods and constants for fast case switch statement support.
-  //
-  // Only allow fast-case switch if the range of labels is at most
-  // this factor times the number of case labels.
-  // Value is derived from comparing the size of code generated by the normal
-  // switch code for Smi-labels to the size of a single pointer. If code
-  // quality increases this number should be decreased to match.
-  static const int kFastSwitchMaxOverheadFactor = 5;
-
-  // Minimal number of switch cases required before we allow jump-table
-  // optimization.
-  static const int kFastSwitchMinCaseCount = 5;
-
-  // The limit of the range of a fast-case switch, as a factor of the number
-  // of cases of the switch. Each platform should return a value that
-  // is optimal compared to the default code generated for a switch statement
-  // on that platform.
-  int FastCaseSwitchMaxOverheadFactor();
-
-  // The minimal number of cases in a switch before the fast-case switch
-  // optimization is enabled. Each platform should return a value that
-  // is optimal compared to the default code generated for a switch statement
-  // on that platform.
-  int FastCaseSwitchMinCaseCount();
-
-  // Allocate a jump table and create code to jump through it.
-  // Should call GenerateFastCaseSwitchCases to generate the code for
-  // all the cases at the appropriate point.
-  void GenerateFastCaseSwitchJumpTable(SwitchStatement* node,
-                                       int min_index,
-                                       int range,
-                                       Label* fail_label,
-                                       Vector<Label*> case_targets,
-                                       Vector<Label> case_labels);
-
-  // Generate the code for cases for the fast case switch.
-  // Called by GenerateFastCaseSwitchJumpTable.
-  void GenerateFastCaseSwitchCases(SwitchStatement* node,
-                                   Vector<Label> case_labels,
-                                   VirtualFrame* start_frame);
-
-  // Fast support for constant-Smi switches.
-  void GenerateFastCaseSwitchStatement(SwitchStatement* node,
-                                       int min_index,
-                                       int range,
-                                       int default_index);
-
-  // Fast support for constant-Smi switches. Tests whether switch statement
-  // permits optimization and calls GenerateFastCaseSwitch if it does.
-  // Returns true if the fast-case switch was generated, and false if not.
-  bool TryGenerateFastCaseSwitchStatement(SwitchStatement* node);
-
   // Methods used to indicate which source code is generated for. Source
   // positions are collected by the assembler and emitted with the relocation
   // information.
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 767b124..f962c01 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -25,64 +25,1408 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+#include <assert.h>
+#include <stdio.h>
+#include <stdarg.h>
+
 #include "v8.h"
 #include "disasm.h"
 
 namespace disasm {
 
-Disassembler::Disassembler(NameConverter const& converter)
-    : converter_(converter) {
-  UNIMPLEMENTED();
+enum OperandOrder {
+  UNSET_OP_ORDER = 0, REG_OPER_OP_ORDER, OPER_REG_OP_ORDER
+};
+
+//------------------------------------------------------------------
+// Tables
+//------------------------------------------------------------------
+struct ByteMnemonic {
+  int b;  // -1 terminates, otherwise must be in range (0..255)
+  OperandOrder op_order_;
+  const char* mnem;
+};
+
+
+static ByteMnemonic two_operands_instr[] = {
+  { 0x03, REG_OPER_OP_ORDER, "add" },
+  { 0x21, OPER_REG_OP_ORDER, "and" },
+  { 0x23, REG_OPER_OP_ORDER, "and" },
+  { 0x3B, REG_OPER_OP_ORDER, "cmp" },
+  { 0x8D, REG_OPER_OP_ORDER, "lea" },
+  { 0x09, OPER_REG_OP_ORDER, "or" },
+  { 0x0B, REG_OPER_OP_ORDER, "or" },
+  { 0x1B, REG_OPER_OP_ORDER, "sbb" },
+  { 0x29, OPER_REG_OP_ORDER, "sub" },
+  { 0x2B, REG_OPER_OP_ORDER, "sub" },
+  { 0x85, REG_OPER_OP_ORDER, "test" },
+  { 0x31, OPER_REG_OP_ORDER, "xor" },
+  { 0x33, REG_OPER_OP_ORDER, "xor" },
+  { 0x87, REG_OPER_OP_ORDER, "xchg" },
+  { 0x8A, REG_OPER_OP_ORDER, "movb" },
+  { 0x8B, REG_OPER_OP_ORDER, "mov" },
+  { -1, UNSET_OP_ORDER, "" }
+};
+
+
+static ByteMnemonic zero_operands_instr[] = {
+  { 0xC3, UNSET_OP_ORDER, "ret" },
+  { 0xC9, UNSET_OP_ORDER, "leave" },
+  { 0x90, UNSET_OP_ORDER, "nop" },
+  { 0xF4, UNSET_OP_ORDER, "hlt" },
+  { 0xCC, UNSET_OP_ORDER, "int3" },
+  { 0x60, UNSET_OP_ORDER, "pushad" },
+  { 0x61, UNSET_OP_ORDER, "popad" },
+  { 0x9C, UNSET_OP_ORDER, "pushfd" },
+  { 0x9D, UNSET_OP_ORDER, "popfd" },
+  { 0x9E, UNSET_OP_ORDER, "sahf" },
+  { 0x99, UNSET_OP_ORDER, "cdq" },
+  { 0x9B, UNSET_OP_ORDER, "fwait" },
+  { -1, UNSET_OP_ORDER, "" }
+};
+
+
+static ByteMnemonic call_jump_instr[] = {
+  { 0xE8, UNSET_OP_ORDER, "call" },
+  { 0xE9, UNSET_OP_ORDER, "jmp" },
+  { -1, UNSET_OP_ORDER, "" }
+};
+
+
+static ByteMnemonic short_immediate_instr[] = {
+  { 0x05, UNSET_OP_ORDER, "add" },
+  { 0x0D, UNSET_OP_ORDER, "or" },
+  { 0x15, UNSET_OP_ORDER, "adc" },
+  { 0x25, UNSET_OP_ORDER, "and" },
+  { 0x2D, UNSET_OP_ORDER, "sub" },
+  { 0x35, UNSET_OP_ORDER, "xor" },
+  { 0x3D, UNSET_OP_ORDER, "cmp" },
+  { -1, UNSET_OP_ORDER, "" }
+};
+
+
+static const char* conditional_code_suffix[] = {
+  "o", "no", "c", "nc", "z", "nz", "na", "a",
+  "s", "ns", "pe", "po", "l", "ge", "le", "g"
+};
+
+
+enum InstructionType {
+  NO_INSTR,
+  ZERO_OPERANDS_INSTR,
+  TWO_OPERANDS_INSTR,
+  JUMP_CONDITIONAL_SHORT_INSTR,
+  REGISTER_INSTR,
+  PUSHPOP_INSTR,  // Has implicit 64-bit operand size.
+  MOVE_REG_INSTR,
+  CALL_JUMP_INSTR,
+  SHORT_IMMEDIATE_INSTR
+};
+
+
+struct InstructionDesc {
+  const char* mnem;
+  InstructionType type;
+  OperandOrder op_order_;
+};
+
+
+class InstructionTable {
+ public:
+  InstructionTable();
+  const InstructionDesc& Get(byte x) const {
+    return instructions_[x];
+  }
+
+ private:
+  InstructionDesc instructions_[256];
+  void Clear();
+  void Init();
+  void CopyTable(ByteMnemonic bm[], InstructionType type);
+  void SetTableRange(InstructionType type, byte start, byte end,
+                     const char* mnem);
+  void AddJumpConditionalShort();
+};
+
+
+InstructionTable::InstructionTable() {
+  Clear();
+  Init();
 }
 
 
-Disassembler::~Disassembler() {
-  UNIMPLEMENTED();
+void InstructionTable::Clear() {
+  for (int i = 0; i < 256; i++) {
+    instructions_[i].mnem = "";
+    instructions_[i].type = NO_INSTR;
+    instructions_[i].op_order_ = UNSET_OP_ORDER;
+  }
 }
 
 
-const char* NameConverter::NameOfAddress(unsigned char* addr) const {
-  UNIMPLEMENTED();
-  return NULL;
+void InstructionTable::Init() {
+  CopyTable(two_operands_instr, TWO_OPERANDS_INSTR);
+  CopyTable(zero_operands_instr, ZERO_OPERANDS_INSTR);
+  CopyTable(call_jump_instr, CALL_JUMP_INSTR);
+  CopyTable(short_immediate_instr, SHORT_IMMEDIATE_INSTR);
+  AddJumpConditionalShort();
+  SetTableRange(PUSHPOP_INSTR, 0x50, 0x57, "push");
+  SetTableRange(PUSHPOP_INSTR, 0x58, 0x5F, "pop");
+  SetTableRange(MOVE_REG_INSTR, 0xB8, 0xBF, "mov");
+}
+
+
+void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
+  for (int i = 0; bm[i].b >= 0; i++) {
+    InstructionDesc* id = &instructions_[bm[i].b];
+    id->mnem = bm[i].mnem;
+    id->op_order_ = bm[i].op_order_;
+    assert(id->type == NO_INSTR);  // Information already entered
+    id->type = type;
+  }
+}
+
+
+void InstructionTable::SetTableRange(InstructionType type, byte start,
+                                     byte end, const char* mnem) {
+  for (byte b = start; b <= end; b++) {
+    InstructionDesc* id = &instructions_[b];
+    assert(id->type == NO_INSTR);  // Information already entered
+    id->mnem = mnem;
+    id->type = type;
+  }
+}
+
+
+void InstructionTable::AddJumpConditionalShort() {
+  for (byte b = 0x70; b <= 0x7F; b++) {
+    InstructionDesc* id = &instructions_[b];
+    assert(id->type == NO_INSTR);  // Information already entered
+    id->mnem = NULL;  // Computed depending on condition code.
+    id->type = JUMP_CONDITIONAL_SHORT_INSTR;
+  }
+}
+
+
+static InstructionTable instruction_table;
+
+
+// The X64 disassembler implementation.
+enum UnimplementedOpcodeAction {
+  CONTINUE_ON_UNIMPLEMENTED_OPCODE,
+  ABORT_ON_UNIMPLEMENTED_OPCODE
+};
+
+
+class DisassemblerX64 {
+ public:
+  DisassemblerX64(const NameConverter& converter,
+                  UnimplementedOpcodeAction unimplemented_action =
+                      ABORT_ON_UNIMPLEMENTED_OPCODE)
+      : converter_(converter),
+        tmp_buffer_pos_(0),
+        abort_on_unimplemented_(
+            unimplemented_action == ABORT_ON_UNIMPLEMENTED_OPCODE),
+        rex_(0),
+        operand_size_(0) {
+    tmp_buffer_[0] = '\0';
+  }
+
+  virtual ~DisassemblerX64() {
+  }
+
+  // Writes one disassembled instruction into 'buffer' (0-terminated).
+  // Returns the length of the disassembled machine instruction in bytes.
+  int InstructionDecode(v8::internal::Vector<char> buffer, byte* instruction);
+
+ private:
+
+  const NameConverter& converter_;
+  v8::internal::EmbeddedVector<char, 128> tmp_buffer_;
+  unsigned int tmp_buffer_pos_;
+  bool abort_on_unimplemented_;
+  // Prefixes parsed
+  byte rex_;
+  byte operand_size_;
+
+  void setOperandSizePrefix(byte prefix) {
+    ASSERT_EQ(0x66, prefix);
+    operand_size_ = prefix;
+  }
+
+  void setRex(byte rex) {
+    ASSERT_EQ(0x40, rex & 0xF0);
+    rex_ = rex;
+  }
+
+  bool rex() { return rex_ != 0; }
+
+  bool rex_b() { return (rex_ & 0x01) != 0; }
+
+  // Actual number of base register given the low bits and the rex.b state.
+  int base_reg(int low_bits) { return low_bits | ((rex_ & 0x01) << 3); }
+
+  bool rex_x() { return (rex_ & 0x02) != 0; }
+
+  bool rex_r() { return (rex_ & 0x04) != 0; }
+
+  bool rex_w() { return (rex_ & 0x08) != 0; }
+
+  int operand_size() {
+    return rex_w() ? 64 : (operand_size_ != 0) ? 16 : 32;
+  }
+
+  char operand_size_code() {
+    return rex_w() ? 'q' : (operand_size_ != 0) ? 'w' : 'l';
+  }
+
+  const char* NameOfCPURegister(int reg) const {
+    return converter_.NameOfCPURegister(reg);
+  }
+
+  const char* NameOfByteCPURegister(int reg) const {
+    return converter_.NameOfByteCPURegister(reg);
+  }
+
+  const char* NameOfXMMRegister(int reg) const {
+    return converter_.NameOfXMMRegister(reg);
+  }
+
+  const char* NameOfAddress(byte* addr) const {
+    return converter_.NameOfAddress(addr);
+  }
+
+  // Disassembler helper functions.
+  void get_modrm(byte data,
+                 int* mod,
+                 int* regop,
+                 int* rm) {
+    *mod = (data >> 6) & 3;
+    *regop = ((data & 0x38) >> 3) | (rex_r() ? 8 : 0);
+    *rm = (data & 7) | (rex_b() ? 8 : 0);
+  }
+
+  void get_sib(byte data,
+               int* scale,
+               int* index,
+               int* base) {
+    *scale = (data >> 6) & 3;
+    *index = ((data >> 3) & 7) | (rex_x() ? 8 : 0);
+    *base = (data & 7) | (rex_b() ? 8 : 0);
+  }
+
+  typedef const char* (DisassemblerX64::*RegisterNameMapping)(int reg) const;
+
+  int PrintRightOperandHelper(byte* modrmp,
+                              RegisterNameMapping register_name);
+  int PrintRightOperand(byte* modrmp);
+  int PrintRightByteOperand(byte* modrmp);
+  int PrintOperands(const char* mnem,
+                    OperandOrder op_order,
+                    byte* data);
+  int PrintImmediateOp(byte* data);
+  int F7Instruction(byte* data);
+  int D1D3C1Instruction(byte* data);
+  int JumpShort(byte* data);
+  int JumpConditional(byte* data);
+  int JumpConditionalShort(byte* data);
+  int SetCC(byte* data);
+  int FPUInstruction(byte* data);
+  void AppendToBuffer(const char* format, ...);
+
+  void UnimplementedInstruction() {
+    if (abort_on_unimplemented_) {
+      UNIMPLEMENTED();
+    } else {
+      AppendToBuffer("'Unimplemented Instruction'");
+    }
+  }
+};
+
+
+void DisassemblerX64::AppendToBuffer(const char* format, ...) {
+  v8::internal::Vector<char> buf = tmp_buffer_ + tmp_buffer_pos_;
+  va_list args;
+  va_start(args, format);
+  int result = v8::internal::OS::VSNPrintF(buf, format, args);
+  va_end(args);
+  tmp_buffer_pos_ += result;
+}
+
+
+int DisassemblerX64::PrintRightOperandHelper(
+    byte* modrmp,
+    RegisterNameMapping register_name) {
+  int mod, regop, rm;
+  get_modrm(*modrmp, &mod, &regop, &rm);
+  switch (mod) {
+    case 0:
+      if ((rm & 7) == 5) {
+        int32_t disp = *reinterpret_cast<int32_t*>(modrmp + 1);
+        AppendToBuffer("[0x%x]", disp);
+        return 5;
+      } else if ((rm & 7) == 4) {
+        // Codes for SIB byte.
+        byte sib = *(modrmp + 1);
+        int scale, index, base;
+        get_sib(sib, &scale, &index, &base);
+        if (index == 4 && (base & 7) == 4 && scale == 0 /*times_1*/) {
+          // index == rsp means no index. Only use sib byte with no index for
+          // rsp and r12 base.
+          AppendToBuffer("[%s]", (this->*register_name)(base));
+          return 2;
+        } else if (base == 5) {
+          // base == rbp means no base register (when mod == 0).
+          int32_t disp = *reinterpret_cast<int32_t*>(modrmp + 2);
+          AppendToBuffer("[%s*%d+0x%x]",
+                         (this->*register_name)(index),
+                         1 << scale, disp);
+          return 6;
+        } else if (index != 4 && base != 5) {
+          // [base+index*scale]
+          AppendToBuffer("[%s+%s*%d]",
+                         (this->*register_name)(base),
+                         (this->*register_name)(index),
+                         1 << scale);
+          return 2;
+        } else {
+          UnimplementedInstruction();
+          return 1;
+        }
+      } else {
+        AppendToBuffer("[%s]", (this->*register_name)(rm));
+        return 1;
+      }
+      break;
+    case 1:  // fall through
+    case 2:
+      if ((rm & 7) == 4) {
+        byte sib = *(modrmp + 1);
+        int scale, index, base;
+        get_sib(sib, &scale, &index, &base);
+        int disp = (mod == 2) ? *reinterpret_cast<int32_t*>(modrmp + 2)
+                              : *reinterpret_cast<char*>(modrmp + 2);
+        if (index == 4 && (base & 7) == 4 && scale == 0 /*times_1*/) {
+          if (-disp > 0) {
+            AppendToBuffer("[%s-0x%x]", (this->*register_name)(base), -disp);
+          } else {
+            AppendToBuffer("[%s+0x%x]", (this->*register_name)(base), disp);
+          }
+        } else {
+          if (-disp > 0) {
+            AppendToBuffer("[%s+%s*%d-0x%x]",
+                           (this->*register_name)(base),
+                           (this->*register_name)(index),
+                           1 << scale,
+                           -disp);
+          } else {
+            AppendToBuffer("[%s+%s*%d+0x%x]",
+                           (this->*register_name)(base),
+                           (this->*register_name)(index),
+                           1 << scale,
+                           disp);
+          }
+        }
+        return mod == 2 ? 6 : 3;
+      } else {
+        // No sib.
+        int disp = (mod == 2) ? *reinterpret_cast<int32_t*>(modrmp + 1)
+                              : *reinterpret_cast<char*>(modrmp + 1);
+        if (-disp > 0) {
+          AppendToBuffer("[%s-0x%x]", (this->*register_name)(rm), -disp);
+        } else {
+          AppendToBuffer("[%s+0x%x]", (this->*register_name)(rm), disp);
+        }
+        return (mod == 2) ? 5 : 2;
+      }
+      break;
+    case 3:
+      AppendToBuffer("%s", (this->*register_name)(rm));
+      return 1;
+    default:
+      UnimplementedInstruction();
+      return 1;
+  }
+  UNREACHABLE();
+}
+
+
+int DisassemblerX64::PrintRightOperand(byte* modrmp) {
+  return PrintRightOperandHelper(modrmp,
+                                 &DisassemblerX64::NameOfCPURegister);
+}
+
+
+int DisassemblerX64::PrintRightByteOperand(byte* modrmp) {
+  return PrintRightOperandHelper(modrmp,
+                                 &DisassemblerX64::NameOfByteCPURegister);
+}
+
+
+// Returns number of bytes used including the current *data.
+// Writes instruction's mnemonic, left and right operands to 'tmp_buffer_'.
+int DisassemblerX64::PrintOperands(const char* mnem,
+                                   OperandOrder op_order,
+                                   byte* data) {
+  byte modrm = *data;
+  int mod, regop, rm;
+  get_modrm(modrm, &mod, &regop, &rm);
+  int advance = 0;
+  switch (op_order) {
+    case REG_OPER_OP_ORDER: {
+      AppendToBuffer("%s%c %s,",
+                     mnem,
+                     operand_size_code(),
+                     NameOfCPURegister(regop));
+      advance = PrintRightOperand(data);
+      break;
+    }
+    case OPER_REG_OP_ORDER: {
+      AppendToBuffer("%s%c ", mnem, operand_size_code());
+      advance = PrintRightOperand(data);
+      AppendToBuffer(",%s", NameOfCPURegister(regop));
+      break;
+    }
+    default:
+      UNREACHABLE();
+      break;
+  }
+  return advance;
+}
+
+
+// Returns number of bytes used by machine instruction, including *data byte.
+// Writes immediate instructions to 'tmp_buffer_'.
+int DisassemblerX64::PrintImmediateOp(byte* data) {
+  bool sign_extension_bit = (*data & 0x02) != 0;
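+  // Opcode 0x83 has the sign extension bit set and takes a sign-extended
+  // 8-bit immediate; opcode 0x81 takes a full 32-bit immediate.  The reg
+  // field of the ModRM byte selects the arithmetic operation.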
+  byte modrm = *(data + 1);
+  int mod, regop, rm;
+  get_modrm(modrm, &mod, &regop, &rm);
+  const char* mnem = "Imm???";
+  switch (regop) {
+    case 0:
+      mnem = "add";
+      break;
+    case 1:
+      mnem = "or";
+      break;
+    case 2:
+      mnem = "adc";
+      break;
+    case 4:
+      mnem = "and";
+      break;
+    case 5:
+      mnem = "sub";
+      break;
+    case 6:
+      mnem = "xor";
+      break;
+    case 7:
+      mnem = "cmp";
+      break;
+    default:
+      UnimplementedInstruction();
+  }
+  AppendToBuffer("%s ", mnem);
+  int count = PrintRightOperand(data + 1);
+  if (sign_extension_bit) {
+    AppendToBuffer(",0x%x", *(data + 1 + count));
+    return 1 + count + 1 /*int8*/;
+  } else {
+    AppendToBuffer(",0x%x", *reinterpret_cast<int32_t*>(data + 1 + count));
+    return 1 + count + 4 /*int32_t*/;
+  }
+}
+
+
+// Returns number of bytes used, including *data.
+int DisassemblerX64::F7Instruction(byte* data) {
+  assert(*data == 0xF7);
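+  // 0xF7 is a group opcode: the reg field of the ModRM byte selects the
+  // operation (a test against an immediate when regop == 0, otherwise an
+  // operation with a single explicit operand).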
+  byte modrm = *(data + 1);
+  int mod, regop, rm;
+  get_modrm(modrm, &mod, &regop, &rm);
+  if (mod == 3 && regop != 0) {
+    const char* mnem = NULL;
+    switch (regop) {
+      case 2:
+        mnem = "not";
+        break;
+      case 3:
+        mnem = "neg";
+        break;
+      case 4:
+        mnem = "mul";
+        break;
+      case 7:
+        mnem = "idiv";
+        break;
+      default:
+        UnimplementedInstruction();
+    }
+    AppendToBuffer("%s%c %s",
+                   mnem,
+                   operand_size_code(),
+                   NameOfCPURegister(rm));
+    return 2;
+  } else if (mod == 3 && regop == 0) {
+    int32_t imm = *reinterpret_cast<int32_t*>(data + 2);
+    AppendToBuffer("test%c %s,0x%x",
+                   operand_size_code(),
+                   NameOfCPURegister(rm),
+                   imm);
+    return 6;
+  } else if (regop == 0) {
+    AppendToBuffer("test%c ", operand_size_code());
+    int count = PrintRightOperand(data + 1);
+    int32_t imm = *reinterpret_cast<int32_t*>(data + 1 + count);
+    AppendToBuffer(",0x%x", imm);
+    return 1 + count + 4 /*int32_t*/;
+  } else {
+    UnimplementedInstruction();
+    return 2;
+  }
+}
+
+
+int DisassemblerX64::D1D3C1Instruction(byte* data) {
+  byte op = *data;
+  assert(op == 0xD1 || op == 0xD3 || op == 0xC1);
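+  // Shift/rotate group: 0xD1 shifts by one, 0xC1 shifts by an 8-bit
+  // immediate and 0xD3 shifts by the value in cl.  The reg field of the
+  // ModRM byte selects the shift operation.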
+  byte modrm = *(data + 1);
+  int mod, regop, rm;
+  get_modrm(modrm, &mod, &regop, &rm);
+  ASSERT(regop < 8);
+  int imm8 = -1;
+  int num_bytes = 2;
+  if (mod == 3) {
+    const char* mnem = NULL;
+    if (op == 0xD1) {
+      imm8 = 1;
+      switch (regop) {
+        case 2:
+          mnem = "rcl";
+          break;
+        case 7:
+          mnem = "sar";
+          break;
+        case 4:
+          mnem = "shl";
+          break;
+        default:
+          UnimplementedInstruction();
+      }
+    } else if (op == 0xC1) {
+      imm8 = *(data + 2);
+      num_bytes = 3;
+      switch (regop) {
+        case 2:
+          mnem = "rcl";
+          break;
+        case 4:
+          mnem = "shl";
+          break;
+        case 5:
+          mnem = "shr";
+          break;
+        case 7:
+          mnem = "sar";
+          break;
+        default:
+          UnimplementedInstruction();
+      }
+    } else if (op == 0xD3) {
+      switch (regop) {
+        case 4:
+          mnem = "shl";
+          break;
+        case 5:
+          mnem = "shr";
+          break;
+        case 7:
+          mnem = "sar";
+          break;
+        default:
+          UnimplementedInstruction();
+      }
+    }
+    assert(mnem != NULL);
+    AppendToBuffer("%s%c %s,",
+                   mnem,
+                   operand_size_code(),
+                   NameOfCPURegister(rm));
+    if (imm8 >= 0) {
+      AppendToBuffer("%d", imm8);
+    } else {
+      AppendToBuffer("cl");
+    }
+  } else {
+    UnimplementedInstruction();
+  }
+  return num_bytes;
+}
+
+
+// Returns number of bytes used, including *data.
+int DisassemblerX64::JumpShort(byte* data) {
+  assert(*data == 0xEB);
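+  // Unconditional short jump with an 8-bit signed displacement relative to
+  // the address of the next instruction.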
+  byte b = *(data + 1);
+  byte* dest = data + static_cast<int8_t>(b) + 2;
+  AppendToBuffer("jmp %s", NameOfAddress(dest));
+  return 2;
+}
+
+
+// Returns number of bytes used, including *data.
+int DisassemblerX64::JumpConditional(byte* data) {
+  assert(*data == 0x0F);
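+  // Two-byte conditional jump with a 32-bit displacement relative to the
+  // end of the 6-byte instruction.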
+  byte cond = *(data + 1) & 0x0F;
+  byte* dest = data + *reinterpret_cast<int32_t*>(data + 2) + 6;
+  const char* mnem = conditional_code_suffix[cond];
+  AppendToBuffer("j%s %s", mnem, NameOfAddress(dest));
+  return 6;  // includes 0x0F
+}
+
+
+// Returns number of bytes used, including *data.
+int DisassemblerX64::JumpConditionalShort(byte* data) {
+  byte cond = *data & 0x0F;
+  byte b = *(data + 1);
+  byte* dest = data + static_cast<int8_t>(b) + 2;
+  const char* mnem = conditional_code_suffix[cond];
+  AppendToBuffer("j%s %s", mnem, NameOfAddress(dest));
+  return 2;
+}
+
+
+// Returns number of bytes used, including *data.
+int DisassemblerX64::SetCC(byte* data) {
+  assert(*data == 0x0F);
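+  // Two-byte setcc: sets a byte-sized operand to 0 or 1 depending on the
+  // condition encoded in the low nibble of the second opcode byte.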
+  byte cond = *(data + 1) & 0x0F;
+  const char* mnem = conditional_code_suffix[cond];
+  AppendToBuffer("set%s%c ", mnem, operand_size_code());
+  PrintRightByteOperand(data + 2);
+  return 3;  // includes 0x0F
+}
+
+
+// Returns number of bytes used, including *data.
+int DisassemblerX64::FPUInstruction(byte* data) {
+  byte b1 = *data;
+  byte b2 = *(data + 1);
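+  // x87 instructions use the escape opcodes 0xD8-0xDF.  The second byte is
+  // either a register-stack form or a ModRM byte for a memory operand.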
+  if (b1 == 0xD9) {
+    const char* mnem = NULL;
+    switch (b2) {
+      case 0xE8:
+        mnem = "fld1";
+        break;
+      case 0xEE:
+        mnem = "fldz";
+        break;
+      case 0xE1:
+        mnem = "fabs";
+        break;
+      case 0xE0:
+        mnem = "fchs";
+        break;
+      case 0xF8:
+        mnem = "fprem";
+        break;
+      case 0xF5:
+        mnem = "fprem1";
+        break;
+      case 0xF7:
+        mnem = "fincstp";
+        break;
+      case 0xE4:
+        mnem = "ftst";
+        break;
+    }
+    if (mnem != NULL) {
+      AppendToBuffer("%s", mnem);
+      return 2;
+    } else if ((b2 & 0xF8) == 0xC8) {
+      AppendToBuffer("fxch st%d", b2 & 0x7);
+      return 2;
+    } else {
+      int mod, regop, rm;
+      get_modrm(*(data + 1), &mod, &regop, &rm);
+      const char* mnem = "?";
+      switch (regop) {
+        case 0:
+          mnem = "fld_s";
+          break;
+        case 3:
+          mnem = "fstp_s";
+          break;
+        default:
+          UnimplementedInstruction();
+      }
+      AppendToBuffer("%s ", mnem);
+      int count = PrintRightOperand(data + 1);
+      return count + 1;
+    }
+  } else if (b1 == 0xDD) {
+    if ((b2 & 0xF8) == 0xC0) {
+      AppendToBuffer("ffree st%d", b2 & 0x7);
+      return 2;
+    } else {
+      int mod, regop, rm;
+      get_modrm(*(data + 1), &mod, &regop, &rm);
+      const char* mnem = "?";
+      switch (regop) {
+        case 0:
+          mnem = "fld_d";
+          break;
+        case 3:
+          mnem = "fstp_d";
+          break;
+        default:
+          UnimplementedInstruction();
+      }
+      AppendToBuffer("%s ", mnem);
+      int count = PrintRightOperand(data + 1);
+      return count + 1;
+    }
+  } else if (b1 == 0xDB) {
+    int mod, regop, rm;
+    get_modrm(*(data + 1), &mod, &regop, &rm);
+    const char* mnem = "?";
+    switch (regop) {
+      case 0:
+        mnem = "fild_s";
+        break;
+      case 2:
+        mnem = "fist_s";
+        break;
+      case 3:
+        mnem = "fistp_s";
+        break;
+      default:
+        UnimplementedInstruction();
+    }
+    AppendToBuffer("%s ", mnem);
+    int count = PrintRightOperand(data + 1);
+    return count + 1;
+  } else if (b1 == 0xDF) {
+    if (b2 == 0xE0) {
+      AppendToBuffer("fnstsw_ax");
+      return 2;
+    }
+    int mod, regop, rm;
+    get_modrm(*(data + 1), &mod, &regop, &rm);
+    const char* mnem = "?";
+    switch (regop) {
+      case 5:
+        mnem = "fild_d";
+        break;
+      case 7:
+        mnem = "fistp_d";
+        break;
+      default:
+        UnimplementedInstruction();
+    }
+    AppendToBuffer("%s ", mnem);
+    int count = PrintRightOperand(data + 1);
+    return count + 1;
+  } else if (b1 == 0xDC || b1 == 0xDE) {
+    bool is_pop = (b1 == 0xDE);
+    if (is_pop && b2 == 0xD9) {
+      AppendToBuffer("fcompp");
+      return 2;
+    }
+    const char* mnem = "FP0xDC";
+    switch (b2 & 0xF8) {
+      case 0xC0:
+        mnem = "fadd";
+        break;
+      case 0xE8:
+        mnem = "fsub";
+        break;
+      case 0xC8:
+        mnem = "fmul";
+        break;
+      case 0xF8:
+        mnem = "fdiv";
+        break;
+      default:
+        UnimplementedInstruction();
+    }
+    AppendToBuffer("%s%s st%d", mnem, is_pop ? "p" : "", b2 & 0x7);
+    return 2;
+  } else if (b1 == 0xDA && b2 == 0xE9) {
+    const char* mnem = "fucompp";
+    AppendToBuffer("%s", mnem);
+    return 2;
+  }
+  AppendToBuffer("Unknown FP instruction");
+  return 2;
+}
+
+// Mnemonics for two-byte instructions whose first byte is the 0x0F escape.
+// Returns NULL if the instruction is not handled here.
+static const char* F0Mnem(byte f0byte) {
+  switch (f0byte) {
+    case 0x1F:
+      return "nop";
+    case 0x31:
+      return "rdtsc";
+    case 0xA2:
+      return "cpuid";
+    case 0xBE:
+      return "movsxb";
+    case 0xBF:
+      return "movsxw";
+    case 0xB6:
+      return "movzxb";
+    case 0xB7:
+      return "movzxw";
+    case 0xAF:
+      return "imul";
+    case 0xA5:
+      return "shld";
+    case 0xAD:
+      return "shrd";
+    case 0xAB:
+      return "bts";
+    default:
+      return NULL;
+  }
+}
+
+// Disassembles the instruction at 'instr' and writes it into 'out_buffer'.
+int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
+                                       byte* instr) {
+  tmp_buffer_pos_ = 0;  // Start writing at position 0 of the buffer.
+  byte* data = instr;
+  bool processed = true;  // Will be set to false if the current instruction
+                          // is not handled by the instruction table below.
+  byte current;
+
+  // Scan for prefixes.
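+  // 0x66 is the operand size override prefix and 0x40-0x4F are REX
+  // prefixes.  REX.W selects a 64-bit operand size; the remaining REX bits
+  // extend the register fields of the instruction.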
+  while (true) {
+    current = *data;
+    if (current == 0x66) {
+      setOperandSizePrefix(current);
+      data++;
+    } else if ((current & 0xF0) == 0x40) {
+      setRex(current);
+      if (rex_w()) AppendToBuffer("REX.W ");
+      data++;
+    } else {
+      break;
+    }
+  }
+
+  const InstructionDesc& idesc = instruction_table.Get(current);
+  switch (idesc.type) {
+    case ZERO_OPERANDS_INSTR:
+      AppendToBuffer(idesc.mnem);
+      data++;
+      break;
+
+    case TWO_OPERANDS_INSTR:
+      data++;
+      data += PrintOperands(idesc.mnem, idesc.op_order_, data);
+      break;
+
+    case JUMP_CONDITIONAL_SHORT_INSTR:
+      data += JumpConditionalShort(data);
+      break;
+
+    case REGISTER_INSTR:
+      AppendToBuffer("%s%c %s",
+                     idesc.mnem,
+                     operand_size_code(),
+                     NameOfCPURegister(base_reg(current & 0x07)));
+      data++;
+      break;
+    case PUSHPOP_INSTR:
+      AppendToBuffer("%s %s",
+                     idesc.mnem,
+                     NameOfCPURegister(base_reg(current & 0x07)));
+      data++;
+      break;
+    case MOVE_REG_INSTR: {
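+      // Move of an immediate into a register.  The width of the immediate
+      // follows the current operand size and the value is printed as an
+      // address.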
+      byte* addr = NULL;
+      switch (operand_size()) {
+        case 16:
+          addr = reinterpret_cast<byte*>(*reinterpret_cast<int16_t*>(data + 1));
+          data += 3;
+          break;
+        case 32:
+          addr = reinterpret_cast<byte*>(*reinterpret_cast<int32_t*>(data + 1));
+          data += 5;
+          break;
+        case 64:
+          addr = reinterpret_cast<byte*>(*reinterpret_cast<int64_t*>(data + 1));
+          data += 9;
+          break;
+        default:
+          UNREACHABLE();
+      }
+      AppendToBuffer("mov%c %s,%s",
+                     operand_size_code(),
+                     NameOfCPURegister(base_reg(current & 0x07)),
+                     NameOfAddress(addr));
+      break;
+    }
+
+    case CALL_JUMP_INSTR: {
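+      // Call or jump with a 32-bit displacement relative to the end of the
+      // five-byte instruction.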
+      byte* addr = data + *reinterpret_cast<int32_t*>(data + 1) + 5;
+      AppendToBuffer("%s %s", idesc.mnem, NameOfAddress(addr));
+      data += 5;
+      break;
+    }
+
+    case SHORT_IMMEDIATE_INSTR: {
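+      // Instruction with rax as the implicit left operand and a 32-bit
+      // immediate, which is printed as an address.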
+      byte* addr =
+          reinterpret_cast<byte*>(*reinterpret_cast<int32_t*>(data + 1));
+      AppendToBuffer("%s rax, %s", idesc.mnem, NameOfAddress(addr));
+      data += 5;
+      break;
+    }
+
+    case NO_INSTR:
+      processed = false;
+      break;
+
+    default:
+      UNIMPLEMENTED();  // This type is not implemented.
+  }
+
+  // The first byte didn't match any of the simple opcodes, so we
+  // need to do special processing on it.
+  if (!processed) {
+    switch (*data) {
+      case 0xC2:
+        AppendToBuffer("ret 0x%x", *reinterpret_cast<uint16_t*>(data + 1));
+        data += 3;
+        break;
+
+      case 0x69:  // fall through
+      case 0x6B: {
+        int mod, regop, rm;
+        get_modrm(*(data + 1), &mod, &regop, &rm);
+        int32_t imm = *data == 0x6B ? *(data + 2)
+            : *reinterpret_cast<int32_t*>(data + 2);
+        AppendToBuffer("imul %s,%s,0x%x", NameOfCPURegister(regop),
+                       NameOfCPURegister(rm), imm);
+        data += 2 + (*data == 0x6B ? 1 : 4);
+      }
+        break;
+
+      case 0xF6: {
+        int mod, regop, rm;
+        get_modrm(*(data + 1), &mod, &regop, &rm);
+        if (mod == 3 && regop == 0) {
+          AppendToBuffer("testb %s,%d", NameOfCPURegister(rm), *(data + 2));
+        } else {
+          UnimplementedInstruction();
+        }
+        data += 3;
+      }
+        break;
+
+      case 0x81:  // fall through
+      case 0x83:  // 0x81 with sign extension bit set
+        data += PrintImmediateOp(data);
+        break;
+
+      case 0x0F: {
+        byte f0byte = *(data + 1);
+        const char* f0mnem = F0Mnem(f0byte);
+        if (f0byte == 0x1F) {
+          data += 1;
+          byte modrm = *data;
+          data += 1;
+          if (((modrm >> 3) & 7) == 4) {
+            // SIB byte present.
+            data += 1;
+          }
+          int mod = modrm >> 6;
+          if (mod == 1) {
+            // Byte displacement.
+            data += 1;
+          } else if (mod == 2) {
+            // 32-bit displacement.
+            data += 4;
+          }
+          AppendToBuffer("nop");
+        } else if (f0byte == 0xA2 || f0byte == 0x31) {
+          AppendToBuffer("%s", f0mnem);
+          data += 2;
+        } else if ((f0byte & 0xF0) == 0x80) {
+          data += JumpConditional(data);
+        } else if (f0byte == 0xBE || f0byte == 0xBF || f0byte == 0xB6 ||
+                   f0byte == 0xB7 || f0byte == 0xAF) {
+          data += 2;
+          data += PrintOperands(f0mnem, REG_OPER_OP_ORDER, data);
+        } else if ((f0byte & 0xF0) == 0x90) {
+          data += SetCC(data);
+        } else {
+          data += 2;
+          if (f0byte == 0xAB || f0byte == 0xA5 || f0byte == 0xAD) {
+            // shrd, shld, bts
+            AppendToBuffer("%s ", f0mnem);
+            int mod, regop, rm;
+            get_modrm(*data, &mod, &regop, &rm);
+            data += PrintRightOperand(data);
+            if (f0byte == 0xAB) {
+              AppendToBuffer(",%s", NameOfCPURegister(regop));
+            } else {
+              AppendToBuffer(",%s,cl", NameOfCPURegister(regop));
+            }
+          } else {
+            UnimplementedInstruction();
+          }
+        }
+      }
+        break;
+
+      case 0x8F: {
+        data++;
+        int mod, regop, rm;
+        get_modrm(*data, &mod, &regop, &rm);
+        if (regop == 0) {
+          AppendToBuffer("pop ");
+          data += PrintRightOperand(data);
+        }
+      }
+        break;
+
+      case 0xFF: {
+        data++;
+        int mod, regop, rm;
+        get_modrm(*data, &mod, &regop, &rm);
+        const char* mnem = NULL;
+        switch (regop) {
+          case 0:
+            mnem = "inc";
+            break;
+          case 1:
+            mnem = "dec";
+            break;
+          case 2:
+            mnem = "call";
+            break;
+          case 4:
+            mnem = "jmp";
+            break;
+          case 6:
+            mnem = "push";
+            break;
+          default:
+            mnem = "???";
+        }
+        AppendToBuffer(((regop <= 1) ? "%s%c " : "%s "),
+                       mnem,
+                       operand_size_code());
+        data += PrintRightOperand(data);
+      }
+        break;
+
+      case 0xC7:  // imm32, fall through
+      case 0xC6:  // imm8
+      {
+        bool is_byte = *data == 0xC6;
+        data++;
+
+        AppendToBuffer("mov%c ", is_byte ? 'b' : operand_size_code());
+        data += PrintRightOperand(data);
+        int32_t imm = is_byte ? *data : *reinterpret_cast<int32_t*>(data);
+        AppendToBuffer(",0x%x", imm);
+        data += is_byte ? 1 : 4;
+      }
+        break;
+
+      case 0x80: {
+        data++;
+        AppendToBuffer("cmpb ");
+        data += PrintRightOperand(data);
+        int32_t imm = *data;
+        AppendToBuffer(",0x%x", imm);
+        data++;
+      }
+        break;
+
+      case 0x88:  // 8bit, fall through
+      case 0x89:  // 32bit
+      {
+        bool is_byte = *data == 0x88;
+        int mod, regop, rm;
+        data++;
+        get_modrm(*data, &mod, &regop, &rm);
+        AppendToBuffer("mov%c ", is_byte ? 'b' : operand_size_code());
+        data += PrintRightOperand(data);
+        AppendToBuffer(",%s", NameOfCPURegister(regop));
+      }
+        break;
+
+      case 0x90:
+      case 0x91:
+      case 0x92:
+      case 0x93:
+      case 0x94:
+      case 0x95:
+      case 0x96:
+      case 0x97: {
+        int reg = (current & 0x7) | (rex_b() ? 8 : 0);
+        if (reg == 0) {
+          AppendToBuffer("nop");  // Common name for xchg rax,rax.
+        } else {
+          AppendToBuffer("xchg%c rax, %s",
+                         operand_size_code(),
+                         NameOfCPURegister(reg));
+        }
+        data++;
+      }
+        break;
+
+      case 0xFE: {
+        data++;
+        int mod, regop, rm;
+        get_modrm(*data, &mod, &regop, &rm);
+        if (mod == 3 && regop == 1) {
+          AppendToBuffer("decb %s", NameOfCPURegister(rm));
+        } else {
+          UnimplementedInstruction();
+        }
+        data++;
+      }
+        break;
+
+      case 0x68:
+        AppendToBuffer("push 0x%x", *reinterpret_cast<int32_t*>(data + 1));
+        data += 5;
+        break;
+
+      case 0x6A:
+        AppendToBuffer("push 0x%x", *reinterpret_cast<int8_t*>(data + 1));
+        data += 2;
+        break;
+
+      case 0xA8:
+        AppendToBuffer("test al,0x%x", *reinterpret_cast<uint8_t*>(data + 1));
+        data += 2;
+        break;
+
+      case 0xA9:
+        AppendToBuffer("test%c rax,0x%x",  // CHECKME!
+                       operand_size_code(),
+                       *reinterpret_cast<int32_t*>(data + 1));
+        data += 5;
+        break;
+
+      case 0xD1:  // fall through
+      case 0xD3:  // fall through
+      case 0xC1:
+        data += D1D3C1Instruction(data);
+        break;
+
+      case 0xD9:  // fall through
+      case 0xDA:  // fall through
+      case 0xDB:  // fall through
+      case 0xDC:  // fall through
+      case 0xDD:  // fall through
+      case 0xDE:  // fall through
+      case 0xDF:
+        data += FPUInstruction(data);
+        break;
+
+      case 0xEB:
+        data += JumpShort(data);
+        break;
+
+      case 0xF2:
+        if (*(data + 1) == 0x0F) {
+          byte b2 = *(data + 2);
+          if (b2 == 0x11) {
+            AppendToBuffer("movsd ");
+            data += 3;
+            int mod, regop, rm;
+            get_modrm(*data, &mod, &regop, &rm);
+            data += PrintRightOperand(data);
+            AppendToBuffer(",%s", NameOfXMMRegister(regop));
+          } else if (b2 == 0x10) {
+            data += 3;
+            int mod, regop, rm;
+            get_modrm(*data, &mod, &regop, &rm);
+            AppendToBuffer("movsd %s,", NameOfXMMRegister(regop));
+            data += PrintRightOperand(data);
+          } else {
+            const char* mnem = "?";
+            switch (b2) {
+              case 0x2A:
+                mnem = "cvtsi2sd";
+                break;
+              case 0x58:
+                mnem = "addsd";
+                break;
+              case 0x59:
+                mnem = "mulsd";
+                break;
+              case 0x5C:
+                mnem = "subsd";
+                break;
+              case 0x5E:
+                mnem = "divsd";
+                break;
+            }
+            data += 3;
+            int mod, regop, rm;
+            get_modrm(*data, &mod, &regop, &rm);
+            if (b2 == 0x2A) {
+              AppendToBuffer("%s %s,", mnem, NameOfXMMRegister(regop));
+              data += PrintRightOperand(data);
+            } else {
+              AppendToBuffer("%s %s,%s", mnem, NameOfXMMRegister(regop),
+                             NameOfXMMRegister(rm));
+              data++;
+            }
+          }
+        } else {
+          UnimplementedInstruction();
+        }
+        break;
+
+      case 0xF3:
+        if (*(data + 1) == 0x0F && *(data + 2) == 0x2C) {
+          data += 3;
+          data += PrintOperands("cvttss2si", REG_OPER_OP_ORDER, data);
+        } else {
+          UnimplementedInstruction();
+        }
+        break;
+
+      case 0xF7:
+        data += F7Instruction(data);
+        break;
+
+      default:
+        UnimplementedInstruction();
+    }
+  }  // !processed
+
+  if (tmp_buffer_pos_ < sizeof tmp_buffer_) {
+    tmp_buffer_[tmp_buffer_pos_] = '\0';
+  }
+
+  int instr_len = data - instr;
+  ASSERT(instr_len > 0);  // Ensure progress.
+
+  int outp = 0;
+  // Instruction bytes.
+  for (byte* bp = instr; bp < data; bp++) {
+    outp += v8::internal::OS::SNPrintF(out_buffer + outp, "%02x", *bp);
+  }
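+  // Pad the hex dump of short instructions so that the disassembled text
+  // lines up.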
+  for (int i = 6 - instr_len; i >= 0; i--) {
+    outp += v8::internal::OS::SNPrintF(out_buffer + outp, "  ");
+  }
+
+  outp += v8::internal::OS::SNPrintF(out_buffer + outp, " %s",
+                                     tmp_buffer_.start());
+  return instr_len;
+}
+
+//------------------------------------------------------------------------------
+
+
+static const char* cpu_regs[16] = {
+  "rax", "rcx", "rdx", "rbx", "rsp", "rbp", "rsi", "rdi",
+  "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15"
+};
+
+
+static const char* byte_cpu_regs[16] = {
+  "al", "cl", "dl", "bl", "spl", "bpl", "sil", "dil",
+  "r8l", "r9l", "r10l", "r11l", "r12l", "r13l", "r14l", "r15l"
+};
+
+
+static const char* xmm_regs[16] = {
+  "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7",
+  "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"
+};
+
+
+const char* NameConverter::NameOfAddress(byte* addr) const {
+  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
+  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
+  return tmp_buffer.start();
+}
+
+
+const char* NameConverter::NameOfConstant(byte* addr) const {
+  return NameOfAddress(addr);
 }
 
 
 const char* NameConverter::NameOfCPURegister(int reg) const {
-  UNIMPLEMENTED();
-  return NULL;
+  if (0 <= reg && reg < 16)
+    return cpu_regs[reg];
+  return "noreg";
 }
 
 
-int Disassembler::ConstantPoolSizeAt(unsigned char* addr) {
-  UNIMPLEMENTED();
-  return 0;
+const char* NameConverter::NameOfByteCPURegister(int reg) const {
+  if (0 <= reg && reg < 16)
+    return byte_cpu_regs[reg];
+  return "noreg";
 }
 
 
+const char* NameConverter::NameOfXMMRegister(int reg) const {
+  if (0 <= reg && reg < 16)
+    return xmm_regs[reg];
+  return "noxmmreg";
+}
+
+
+const char* NameConverter::NameInCode(byte* addr) const {
+  // X64 does not embed debug strings at the moment.
+  UNREACHABLE();
+  return "";
+}
+
+//------------------------------------------------------------------------------
+
+Disassembler::Disassembler(const NameConverter& converter)
+    : converter_(converter) { }
+
+Disassembler::~Disassembler() { }
+
+
 int Disassembler::InstructionDecode(v8::internal::Vector<char> buffer,
-                                    unsigned char* instruction) {
-  UNIMPLEMENTED();
-  return 0;
+                                    byte* instruction) {
+  DisassemblerX64 d(converter_, CONTINUE_ON_UNIMPLEMENTED_OPCODE);
+  return d.InstructionDecode(buffer, instruction);
 }
 
-const char* NameConverter::NameOfByteCPURegister(int a) const {
-  UNIMPLEMENTED();
-  return NULL;
+
+// The X64 assembler does not use constant pools.
+int Disassembler::ConstantPoolSizeAt(byte* instruction) {
+  return -1;
 }
 
-const char* NameConverter::NameOfXMMRegister(int a) const {
-  UNIMPLEMENTED();
-  return NULL;
-}
 
-const char* NameConverter::NameOfConstant(unsigned char* a) const {
-  UNIMPLEMENTED();
-  return NULL;
-}
+void Disassembler::Disassemble(FILE* f, byte* begin, byte* end) {
+  NameConverter converter;
+  Disassembler d(converter);
+  for (byte* pc = begin; pc < end;) {
+    v8::internal::EmbeddedVector<char, 128> buffer;
+    buffer[0] = '\0';
+    byte* prev_pc = pc;
+    pc += d.InstructionDecode(buffer, pc);
+    fprintf(f, "%p", prev_pc);
+    fprintf(f, "    ");
 
-const char* NameConverter::NameInCode(unsigned char* a) const {
-  UNIMPLEMENTED();
-  return NULL;
+    for (byte* bp = prev_pc; bp < pc; bp++) {
+      fprintf(f, "%02x", *bp);
+    }
+    for (int i = 6 - (pc - prev_pc); i >= 0; i--) {
+      fprintf(f, "  ");
+    }
+    fprintf(f, "  %s\n", buffer.start());
+  }
 }
 
 }  // namespace disasm
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index abaffb3..7b8699f 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -212,11 +212,9 @@
   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));  // receiver
   __ testl(rdx, Immediate(kSmiTagMask));
   __ j(zero, &invoke);
-  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
-  __ movzxbq(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
-  __ cmpq(rcx, Immediate(static_cast<int8_t>(JS_GLOBAL_OBJECT_TYPE)));
+  __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
   __ j(equal, &global);
-  __ cmpq(rcx, Immediate(static_cast<int8_t>(JS_BUILTINS_OBJECT_TYPE)));
+  __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
   __ j(not_equal, &invoke);
 
   // Patch the receiver on the stack.
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 1a0b119..099a461 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -176,7 +176,7 @@
   const char* name = Builtins::GetName(id);
   int argc = Builtins::GetArgumentsCount(id);
 
-  movq(target, code, RelocInfo::EXTERNAL_REFERENCE);  // Is external reference?
+  movq(target, code, RelocInfo::EMBEDDED_OBJECT);
   if (!resolved) {
     uint32_t flags =
         Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
@@ -208,7 +208,9 @@
 
 
 void MacroAssembler::Set(Register dst, int64_t x) {
-  if (is_int32(x)) {
+  if (x == 0) {
+    xor_(dst, dst);
+  } else if (is_int32(x)) {
     movq(dst, Immediate(x));
   } else if (is_uint32(x)) {
     movl(dst, Immediate(x));
@@ -219,14 +221,17 @@
 
 
 void MacroAssembler::Set(const Operand& dst, int64_t x) {
-  if (is_int32(x)) {
-    movq(kScratchRegister, Immediate(x));
+  if (x == 0) {
+    xor_(kScratchRegister, kScratchRegister);
+    movq(dst, kScratchRegister);
+  } else if (is_int32(x)) {
+    movq(dst, Immediate(x));
   } else if (is_uint32(x)) {
-    movl(kScratchRegister, Immediate(x));
+    movl(dst, Immediate(x));
   } else {
     movq(kScratchRegister, x, RelocInfo::NONE);
+    movq(dst, kScratchRegister);
   }
-  movq(dst, kScratchRegister);
 }
 
 
@@ -240,11 +245,13 @@
 
 
 void MacroAssembler::Move(Register dst, Handle<Object> source) {
+  ASSERT(!source->IsFailure());
   if (source->IsSmi()) {
     if (IsUnsafeSmi(source)) {
       LoadUnsafeSmi(dst, source);
     } else {
-      movq(dst, source, RelocInfo::NONE);
+      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
+      movq(dst, Immediate(smi));
     }
   } else {
     movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
@@ -253,8 +260,13 @@
 
 
 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
-  Move(kScratchRegister, source);
-  movq(dst, kScratchRegister);
+  if (source->IsSmi()) {
+    int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
+    movq(dst, Immediate(smi));
+  } else {
+    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    movq(dst, kScratchRegister);
+  }
 }
 
 
@@ -265,14 +277,37 @@
 
 
 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
-  Move(kScratchRegister, source);
-  cmpq(dst, kScratchRegister);
+  if (source->IsSmi()) {
+    if (IsUnsafeSmi(source)) {
+      LoadUnsafeSmi(kScratchRegister, source);
+      cmpl(dst, kScratchRegister);
+    } else {
+      // For smi-comparison, it suffices to compare the low 32 bits.
+      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
+      cmpl(dst, Immediate(smi));
+    }
+  } else {
+    ASSERT(source->IsHeapObject());
+    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    cmpq(dst, kScratchRegister);
+  }
 }
 
 
 void MacroAssembler::Push(Handle<Object> source) {
-  Move(kScratchRegister, source);
-  push(kScratchRegister);
+  if (source->IsSmi()) {
+    if (IsUnsafeSmi(source)) {
+      LoadUnsafeSmi(kScratchRegister, source);
+      push(kScratchRegister);
+    } else {
+      int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source));
+      push(Immediate(smi));
+    }
+  } else {
+    ASSERT(source->IsHeapObject());
+    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
+    push(kScratchRegister);
+  }
 }
 
 
@@ -589,7 +624,7 @@
   if (!resolved) {
     uint32_t flags =
         Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
-        Bootstrapper::FixupFlagsIsPCRelative::encode(true) |
+        Bootstrapper::FixupFlagsIsPCRelative::encode(false) |
         Bootstrapper::FixupFlagsUseCodeObject::encode(false);
     Unresolved entry =
         { pc_offset() - kTargetAddrToReturnAddrDist, flags, name };