Version 3.3.0

Fixed bug in floating-point rounding in Crankshaft on ARM (issue 958).

Fixed a number of issues with running without VFPv3 support on ARM (issue 1315).

Introduced v8Locale.Collator, a partial implementation of the Collator API discussed at the last ECMAScript meeting and on the mailing list (see the usage sketch below).

Minor performance improvements and bug fixes.
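
A rough usage sketch of the new collation API (the constructor arguments and
the compare() method shown here are assumptions based on the proposal
discussion, not part of this change):

    // Hypothetical example: case-insensitive German collation.
    var collator = new v8Locale.Collator({locale: 'de', ignoreCase: true});
    var sorted = ['Zebra', 'Äpfel', 'apfel'].sort(function(a, b) {
      return collator.compare(a, b);
    });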

git-svn-id: http://v8.googlecode.com/svn/trunk@7648 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/SConscript b/src/SConscript
index 06ee907..5ebc1cc 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -297,6 +297,11 @@
 '''.split()
 
 
+EXPERIMENTAL_LIBRARY_FILES = '''
+proxy.js
+'''.split()
+
+
 def Abort(message):
   print message
   sys.exit(1)
@@ -321,9 +326,16 @@
   # compile it.
   library_files = [s for s in LIBRARY_FILES]
   library_files.append('macros.py')
-  libraries_src, libraries_empty_src = env.JS2C(['libraries.cc', 'libraries-empty.cc'], library_files, TYPE='CORE')
+  libraries_src = env.JS2C(['libraries.cc'], library_files, TYPE='CORE')
   libraries_obj = context.ConfigureObject(env, libraries_src, CPPPATH=['.'])
 
+  # Combine the experimental JavaScript library files into a C++ file
+  # and compile it.
+  experimental_library_files = [s for s in EXPERIMENTAL_LIBRARY_FILES]
+  experimental_library_files.append('macros.py')
+  experimental_libraries_src = env.JS2C(['experimental-libraries.cc'], experimental_library_files, TYPE='EXPERIMENTAL')
+  experimental_libraries_obj = context.ConfigureObject(env, experimental_libraries_src, CPPPATH=['.'])
+
   source_objs = context.ConfigureObject(env, source_files)
   non_snapshot_files = [source_objs]
 
@@ -340,7 +352,7 @@
   mksnapshot_env = env.Copy()
   mksnapshot_env.Replace(**context.flags['mksnapshot'])
   mksnapshot_src = 'mksnapshot.cc'
-  mksnapshot = mksnapshot_env.Program('mksnapshot', [mksnapshot_src, libraries_obj, non_snapshot_files, empty_snapshot_obj], PDB='mksnapshot.exe.pdb')
+  mksnapshot = mksnapshot_env.Program('mksnapshot', [mksnapshot_src, libraries_obj, experimental_libraries_obj, non_snapshot_files, empty_snapshot_obj], PDB='mksnapshot.exe.pdb')
   if context.use_snapshot:
     if context.build_snapshot:
       snapshot_cc = env.Snapshot('snapshot.cc', mksnapshot, LOGFILE=File('snapshot.log').abspath)
@@ -349,7 +361,7 @@
     snapshot_obj = context.ConfigureObject(env, snapshot_cc, CPPPATH=['.'])
   else:
     snapshot_obj = empty_snapshot_obj
-  library_objs = [non_snapshot_files, libraries_obj, snapshot_obj]
+  library_objs = [non_snapshot_files, libraries_obj, experimental_libraries_obj, snapshot_obj]
   return (library_objs, d8_objs, [mksnapshot], preparser_objs)
 
 
diff --git a/src/api.cc b/src/api.cc
index a2373cd..1a52174 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -3707,6 +3707,7 @@
 
     // Create the environment.
     env = isolate->bootstrapper()->CreateEnvironment(
+        isolate,
         Utils::OpenHandle(*global_object),
         proxy_template,
         extensions);
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index fd8e8b5..fa97a3b 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -315,6 +315,7 @@
   no_const_pool_before_ = 0;
   last_const_pool_end_ = 0;
   last_bound_pos_ = 0;
+  ast_id_for_reloc_info_ = kNoASTId;
 }
 
 
@@ -2722,7 +2723,14 @@
       }
     }
     ASSERT(buffer_space() >= kMaxRelocSize);  // too late to grow buffer here
-    reloc_info_writer.Write(&rinfo);
+    if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
+      ASSERT(ast_id_for_reloc_info_ != kNoASTId);
+      RelocInfo reloc_info_with_ast_id(pc_, rmode, ast_id_for_reloc_info_);
+      ast_id_for_reloc_info_ = kNoASTId;
+      reloc_info_writer.Write(&reloc_info_with_ast_id);
+    } else {
+      reloc_info_writer.Write(&rinfo);
+    }
   }
 }
 
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 9050c2c..10fc749 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -1166,6 +1166,10 @@
   // Mark address of a debug break slot.
   void RecordDebugBreakSlot();
 
+  // Record the AST id of the CallIC being compiled, so that it can be placed
+  // in the relocation information.
+  void RecordAstId(unsigned ast_id) { ast_id_for_reloc_info_ = ast_id; }
+
   // Record a comment relocation entry that can be used by a disassembler.
   // Use --code-comments to enable.
   void RecordComment(const char* msg);
@@ -1223,6 +1227,11 @@
   void CheckConstPool(bool force_emit, bool require_jump);
 
  protected:
+  // Relocation for a type-recording IC has the AST id added to it.  This
+  // member variable is a way to pass the information from the call site to
+  // the relocation info.
+  unsigned ast_id_for_reloc_info_;
+
   bool emit_debug_code() const { return emit_debug_code_; }
 
   int buffer_space() const { return reloc_info_writer.pos() - pc_; }
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index eecd01d..d66daea 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -308,8 +308,8 @@
 
 
 void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
-  Register exponent = result2_;
-  Register mantissa = result1_;
+  Register exponent = result1_;
+  Register mantissa = result2_;
 
   Label not_special;
   // Convert from Smi to integer.
@@ -517,7 +517,7 @@
     ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2);
     __ push(lr);
     __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
-    // Write Smi from r1 to r1 and r0 in double format.  r9 is scratch.
+    // Write Smi from r1 to r1 and r0 in double format.
     __ mov(scratch1, Operand(r1));
     ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2);
     __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
@@ -682,51 +682,51 @@
   } else {
     Label fewer_than_20_useful_bits;
     // Expected output:
-    // |         dst1            |         dst2            |
+    // |         dst2            |         dst1            |
     // | s |   exp   |              mantissa               |
 
     // Check for zero.
     __ cmp(scratch1, Operand(0));
-    __ mov(dst1, scratch1);
     __ mov(dst2, scratch1);
+    __ mov(dst1, scratch1);
     __ b(eq, &done);
 
     // Preload the sign of the value.
-    __ and_(dst1, scratch1, Operand(HeapNumber::kSignMask), SetCC);
+    __ and_(dst2, scratch1, Operand(HeapNumber::kSignMask), SetCC);
     // Get the absolute value of the object (as an unsigned integer).
     __ rsb(scratch1, scratch1, Operand(0), SetCC, mi);
 
     // Get mantissa[51:20].
 
     // Get the position of the first set bit.
-    __ CountLeadingZeros(dst2, scratch1, scratch2);
-    __ rsb(dst2, dst2, Operand(31));
+    __ CountLeadingZeros(dst1, scratch1, scratch2);
+    __ rsb(dst1, dst1, Operand(31));
 
     // Set the exponent.
-    __ add(scratch2, dst2, Operand(HeapNumber::kExponentBias));
-    __ Bfi(dst1, scratch2, scratch2,
+    __ add(scratch2, dst1, Operand(HeapNumber::kExponentBias));
+    __ Bfi(dst2, scratch2, scratch2,
         HeapNumber::kExponentShift, HeapNumber::kExponentBits);
 
     // Clear the first non null bit.
     __ mov(scratch2, Operand(1));
-    __ bic(scratch1, scratch1, Operand(scratch2, LSL, dst2));
+    __ bic(scratch1, scratch1, Operand(scratch2, LSL, dst1));
 
-    __ cmp(dst2, Operand(HeapNumber::kMantissaBitsInTopWord));
+    __ cmp(dst1, Operand(HeapNumber::kMantissaBitsInTopWord));
     // Get the number of bits to set in the lower part of the mantissa.
-    __ sub(scratch2, dst2, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
+    __ sub(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
     __ b(mi, &fewer_than_20_useful_bits);
     // Set the higher 20 bits of the mantissa.
-    __ orr(dst1, dst1, Operand(scratch1, LSR, scratch2));
+    __ orr(dst2, dst2, Operand(scratch1, LSR, scratch2));
     __ rsb(scratch2, scratch2, Operand(32));
-    __ mov(dst2, Operand(scratch1, LSL, scratch2));
+    __ mov(dst1, Operand(scratch1, LSL, scratch2));
     __ b(&done);
 
     __ bind(&fewer_than_20_useful_bits);
-    __ rsb(scratch2, dst2, Operand(HeapNumber::kMantissaBitsInTopWord));
+    __ rsb(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord));
     __ mov(scratch2, Operand(scratch1, LSL, scratch2));
-    __ orr(dst1, dst1, scratch2);
-    // Set dst2 to 0.
-    __ mov(dst2, Operand(0));
+    __ orr(dst2, dst2, scratch2);
+    // Set dst1 to 0.
+    __ mov(dst1, Operand(0));
   }
 
   __ b(&done);
@@ -1817,6 +1817,9 @@
     case TRBinaryOpIC::ODDBALL:
       GenerateOddballStub(masm);
       break;
+    case TRBinaryOpIC::BOTH_STRING:
+      GenerateBothStringStub(masm);
+      break;
     case TRBinaryOpIC::STRING:
       GenerateStringStub(masm);
       break;
@@ -2062,6 +2065,9 @@
                                                          op_,
                                                          result,
                                                          scratch1);
+        if (FLAG_debug_code) {
+          __ stop("Unreachable code.");
+        }
       }
       break;
     }
@@ -2191,6 +2197,7 @@
 // requested the code falls through. If number allocation is requested but a
 // heap number cannot be allocated, the code jumps to the label gc_required.
 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
+    Label* use_runtime,
     Label* gc_required,
     SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
   Label not_smis;
@@ -2212,7 +2219,7 @@
   // If heap number results are possible generate the result in an allocated
   // heap number.
   if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) {
-    GenerateFPOperation(masm, true, NULL, gc_required);
+    GenerateFPOperation(masm, true, use_runtime, gc_required);
   }
   __ bind(&not_smis);
 }
@@ -2224,11 +2231,14 @@
   if (result_type_ == TRBinaryOpIC::UNINITIALIZED ||
       result_type_ == TRBinaryOpIC::SMI) {
     // Only allow smi results.
-    GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS);
+    GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS);
   } else {
     // Allow heap number result and don't make a transition if a heap number
     // cannot be allocated.
-    GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
+    GenerateSmiCode(masm,
+                    &call_runtime,
+                    &call_runtime,
+                    ALLOW_HEAPNUMBER_RESULTS);
   }
 
   // Code falls through if the result is not returned as either a smi or heap
@@ -2250,6 +2260,36 @@
 }
 
 
+void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
+  Label call_runtime;
+  ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING);
+  ASSERT(op_ == Token::ADD);
+  // If both arguments are strings, call the string add stub.
+  // Otherwise, do a transition.
+
+  // Registers containing left and right operands respectively.
+  Register left = r1;
+  Register right = r0;
+
+  // Test if left operand is a string.
+  __ JumpIfSmi(left, &call_runtime);
+  __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE);
+  __ b(ge, &call_runtime);
+
+  // Test if right operand is a string.
+  __ JumpIfSmi(right, &call_runtime);
+  __ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE);
+  __ b(ge, &call_runtime);
+
+  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
+  GenerateRegisterArgsPush(masm);
+  __ TailCallStub(&string_add_stub);
+
+  __ bind(&call_runtime);
+  GenerateTypeTransition(masm);
+}
+
+
 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
   ASSERT(operands_type_ == TRBinaryOpIC::INT32);
 
@@ -2415,6 +2455,9 @@
         // Call the C function to handle the double operation.
         FloatingPointHelper::CallCCodeForDoubleOperation(
             masm, op_, heap_number_result, scratch1);
+        if (FLAG_debug_code) {
+          __ stop("Unreachable code.");
+        }
       }
 
       break;
@@ -2501,16 +2544,16 @@
       __ Ret();
 
       __ bind(&return_heap_number);
+      heap_number_result = r5;
+      GenerateHeapResultAllocation(masm,
+                                   heap_number_result,
+                                   heap_number_map,
+                                   scratch1,
+                                   scratch2,
+                                   &call_runtime);
+
       if (CpuFeatures::IsSupported(VFP3)) {
         CpuFeatures::Scope scope(VFP3);
-        heap_number_result = r5;
-        GenerateHeapResultAllocation(masm,
-                                     heap_number_result,
-                                     heap_number_map,
-                                     scratch1,
-                                     scratch2,
-                                     &call_runtime);
-
         if (op_ != Token::SHR) {
           // Convert the result to a floating point value.
           __ vmov(double_scratch.low(), r2);
@@ -2529,6 +2572,7 @@
       } else {
         // Tail call that writes the int32 in r2 to the heap number in r0, using
         // r3 as scratch. r0 is preserved and returned.
+        __ mov(r0, r5);
         WriteInt32ToHeapNumberStub stub(r2, r0, r3);
         __ TailCallStub(&stub);
       }
@@ -2595,7 +2639,7 @@
 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
   Label call_runtime, call_string_add_or_runtime;
 
-  GenerateSmiCode(masm, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
+  GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
 
   GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime);
 
@@ -3667,7 +3711,7 @@
   __ b(ne, &slow);
 
   // Null is not instance of anything.
-  __ cmp(scratch, Operand(FACTORY->null_value()));
+  __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
   __ b(ne, &object_not_null);
   __ mov(r0, Operand(Smi::FromInt(1)));
   __ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -4165,7 +4209,7 @@
 
   __ bind(&failure);
   // For failure and exception return null.
-  __ mov(r0, Operand(FACTORY->null_value()));
+  __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
   __ add(sp, sp, Operand(4 * kPointerSize));
   __ Ret();
 
@@ -4236,6 +4280,8 @@
   const int kMaxInlineLength = 100;
   Label slowcase;
   Label done;
+  Factory* factory = masm->isolate()->factory();
+
   __ ldr(r1, MemOperand(sp, kPointerSize * 2));
   STATIC_ASSERT(kSmiTag == 0);
   STATIC_ASSERT(kSmiTagSize == 1);
@@ -4270,7 +4316,7 @@
   // Interleave operations for better latency.
   __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
   __ add(r3, r0, Operand(JSRegExpResult::kSize));
-  __ mov(r4, Operand(FACTORY->empty_fixed_array()));
+  __ mov(r4, Operand(factory->empty_fixed_array()));
   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
   __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
   __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
@@ -4291,13 +4337,13 @@
   // r5: Number of elements in array, untagged.
 
   // Set map.
-  __ mov(r2, Operand(FACTORY->fixed_array_map()));
+  __ mov(r2, Operand(factory->fixed_array_map()));
   __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
   // Set FixedArray length.
   __ mov(r6, Operand(r5, LSL, kSmiTagSize));
   __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
   // Fill contents of fixed-array with the-hole.
-  __ mov(r2, Operand(FACTORY->the_hole_value()));
+  __ mov(r2, Operand(factory->the_hole_value()));
   __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   // Fill fixed array elements with hole.
   // r0: JSArray, tagged.
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index 811c275..0bb0025 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -147,6 +147,7 @@
                            Label* not_numbers,
                            Label* gc_required);
   void GenerateSmiCode(MacroAssembler* masm,
+                       Label* use_runtime,
                        Label* gc_required,
                        SmiCodeGenerateHeapNumberResults heapnumber_results);
   void GenerateLoadArguments(MacroAssembler* masm);
@@ -157,6 +158,7 @@
   void GenerateHeapNumberStub(MacroAssembler* masm);
   void GenerateOddballStub(MacroAssembler* masm);
   void GenerateStringStub(MacroAssembler* masm);
+  void GenerateBothStringStub(MacroAssembler* masm);
   void GenerateGenericStub(MacroAssembler* masm);
   void GenerateAddStrings(MacroAssembler* masm);
   void GenerateCallRuntime(MacroAssembler* masm);
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 871b453..c8ab8bc 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -783,7 +783,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       // Value in r0 is ignored (declarations are statements).
     }
   }
@@ -857,7 +857,7 @@
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    EmitCallIC(ic, &patch_site);
+    EmitCallIC(ic, &patch_site, clause->label()->id());
     __ cmp(r0, Operand(0));
     __ b(ne, &next_test);
     __ Drop(1);  // Switch value is no longer needed.
@@ -1109,6 +1109,67 @@
 }
 
 
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register current = cp;
+  Register next = r1;
+  Register temp = r2;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+        __ tst(temp, temp);
+        __ b(ne, slow);
+      }
+      // Load next context in chain.
+      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
+      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+      // Walk the rest of the chain without clobbering cp.
+      current = next;
+    }
+    // If no outer scope calls eval, we do not need to check more
+    // context extensions.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s->is_eval_scope()) {
+    Label loop, fast;
+    if (!current.is(next)) {
+      __ Move(next, current);
+    }
+    __ bind(&loop);
+    // Terminate at global context.
+    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
+    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+    __ cmp(temp, ip);
+    __ b(eq, &fast);
+    // Check that extension is NULL.
+    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
+    __ tst(temp, temp);
+    __ b(ne, slow);
+    // Load next context in chain.
+    __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
+    __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+    __ b(&loop);
+    __ bind(&fast);
+  }
+
+  __ ldr(r0, GlobalObjectOperand());
+  __ mov(r2, Operand(slot->var()->name()));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+  EmitCallIC(ic, mode, AstNode::kNoNumber);
+}
+
+
 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
     Slot* slot,
     Label* slow) {
@@ -1187,7 +1248,7 @@
           __ mov(r0, Operand(key_literal->handle()));
           Handle<Code> ic =
               isolate()->builtins()->KeyedLoadIC_Initialize();
-          EmitCallIC(ic, RelocInfo::CODE_TARGET);
+          EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
           __ jmp(done);
         }
       }
@@ -1196,67 +1257,6 @@
 }
 
 
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
-    Slot* slot,
-    TypeofState typeof_state,
-    Label* slow) {
-  Register current = cp;
-  Register next = r1;
-  Register temp = r2;
-
-  Scope* s = scope();
-  while (s != NULL) {
-    if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
-        // Check that extension is NULL.
-        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
-        __ tst(temp, temp);
-        __ b(ne, slow);
-      }
-      // Load next context in chain.
-      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
-      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
-      // Walk the rest of the chain without clobbering cp.
-      current = next;
-    }
-    // If no outer scope calls eval, we do not need to check more
-    // context extensions.
-    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
-    s = s->outer_scope();
-  }
-
-  if (s->is_eval_scope()) {
-    Label loop, fast;
-    if (!current.is(next)) {
-      __ Move(next, current);
-    }
-    __ bind(&loop);
-    // Terminate at global context.
-    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
-    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
-    __ cmp(temp, ip);
-    __ b(eq, &fast);
-    // Check that extension is NULL.
-    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
-    __ tst(temp, temp);
-    __ b(ne, slow);
-    // Load next context in chain.
-    __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
-    __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
-    __ b(&loop);
-    __ bind(&fast);
-  }
-
-  __ ldr(r0, GlobalObjectOperand());
-  __ mov(r2, Operand(slot->var()->name()));
-  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
-      ? RelocInfo::CODE_TARGET
-      : RelocInfo::CODE_TARGET_CONTEXT;
-  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  EmitCallIC(ic, mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(Variable* var) {
   // Four cases: non-this global variables, lookup slots, all other
   // types of slots, and parameters that rewrite to explicit property
@@ -1271,7 +1271,7 @@
     __ ldr(r0, GlobalObjectOperand());
     __ mov(r2, Operand(var->name()));
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
     context()->Plug(r0);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
@@ -1330,7 +1330,7 @@
 
     // Call keyed load IC. It has arguments key and receiver in r0 and r1.
     Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
     context()->Plug(r0);
   }
 }
@@ -1438,8 +1438,10 @@
             VisitForAccumulatorValue(value);
             __ mov(r2, Operand(key->handle()));
             __ ldr(r1, MemOperand(sp));
-            Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
-            EmitCallIC(ic, RelocInfo::CODE_TARGET);
+            Handle<Code> ic = is_strict_mode()
+                ? isolate()->builtins()->StoreIC_Initialize_Strict()
+                : isolate()->builtins()->StoreIC_Initialize();
+            EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, key->id());
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           } else {
             VisitForEffect(value);
@@ -1651,13 +1653,13 @@
     SetSourcePosition(expr->position() + 1);
     AccumulatorValueContext context(this);
     if (ShouldInlineSmiCase(op)) {
-      EmitInlineSmiBinaryOp(expr,
+      EmitInlineSmiBinaryOp(expr->binary_operation(),
                             op,
                             mode,
                             expr->target(),
                             expr->value());
     } else {
-      EmitBinaryOp(op, mode);
+      EmitBinaryOp(expr->binary_operation(), op, mode);
     }
 
     // Deoptimization point in case the binary operation may have side effects.
@@ -1693,7 +1695,11 @@
   __ mov(r2, Operand(key->handle()));
   // Call load IC. It has arguments receiver and property name r0 and r2.
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  if (prop->is_synthetic()) {
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+  } else {
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id());
+  }
 }
 
 
@@ -1701,11 +1707,15 @@
   SetSourcePosition(prop->position());
   // Call keyed load IC. It has arguments key and receiver in r0 and r1.
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  if (prop->is_synthetic()) {
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
+  } else {
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id());
+  }
 }
 
 
-void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                               Token::Value op,
                                               OverwriteMode mode,
                                               Expression* left_expr,
@@ -1728,7 +1738,7 @@
 
   __ bind(&stub_call);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->id());
   __ jmp(&done);
 
   __ bind(&smi_case);
@@ -1804,11 +1814,12 @@
 }
 
 
-void FullCodeGenerator::EmitBinaryOp(Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
+                                     Token::Value op,
                                      OverwriteMode mode) {
   __ pop(r1);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), NULL);
+  EmitCallIC(stub.GetCode(), NULL, expr->id());
   context()->Plug(r0);
 }
 
@@ -1848,7 +1859,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
           : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       break;
     }
     case KEYED_PROPERTY: {
@@ -1871,7 +1882,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       break;
     }
   }
@@ -1897,7 +1908,7 @@
     Handle<Code> ic = is_strict_mode()
         ? isolate()->builtins()->StoreIC_Initialize_Strict()
         : isolate()->builtins()->StoreIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
 
   } else if (op == Token::INIT_CONST) {
     // Like var declarations, const declarations are hoisted to function
@@ -2006,7 +2017,7 @@
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->StoreIC_Initialize_Strict()
       : isolate()->builtins()->StoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2052,7 +2063,7 @@
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
       : isolate()->builtins()->KeyedStoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2104,7 +2115,9 @@
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   Handle<Code> ic =
       isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
-  EmitCallIC(ic, mode);
+  unsigned ast_id =
+      (mode == RelocInfo::CODE_TARGET_WITH_ID) ? expr->id() : kNoASTId;
+  EmitCallIC(ic, mode, ast_id);
   RecordJSReturnSite(expr);
   // Restore context register.
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2139,7 +2152,7 @@
   Handle<Code> ic =
       isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2315,11 +2328,11 @@
       { PreservePositionScope scope(masm()->positions_recorder());
         VisitForStackValue(prop->obj());
       }
-      EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
+      EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET_WITH_ID);
     } else {
       // Call to a keyed property.
       // For a synthetic property use keyed load IC followed by function call,
-      // for a regular property use keyed CallIC.
+      // for a regular property use keyed EmitCallIC.
       if (prop->is_synthetic()) {
         // Do not visit the object and key subexpressions (they are shared
         // by all occurrences of the same rewritten parameter).
@@ -2337,7 +2350,7 @@
         SetSourcePosition(prop->position());
 
         Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-        EmitCallIC(ic, RelocInfo::CODE_TARGET);
+        EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
         __ ldr(r1, GlobalObjectOperand());
         __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
         __ Push(r0, r1);  // Function, receiver.
@@ -2346,7 +2359,7 @@
         { PreservePositionScope scope(masm()->positions_recorder());
           VisitForStackValue(prop->obj());
         }
-        EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
+        EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET_WITH_ID);
       }
     }
   } else {
@@ -3161,15 +3174,14 @@
 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() >= 2);
 
-  int arg_count = args->length() - 2;  // For receiver and function.
-  VisitForStackValue(args->at(0));  // Receiver.
-  for (int i = 0; i < arg_count; i++) {
-    VisitForStackValue(args->at(i + 1));
+  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
+  for (int i = 0; i < arg_count + 1; i++) {
+    VisitForStackValue(args->at(i));
   }
-  VisitForAccumulatorValue(args->at(arg_count + 1));  // Function.
+  VisitForAccumulatorValue(args->last());  // Function.
 
-  // InvokeFunction requires function in r1. Move it in there.
-  if (!result_register().is(r1)) __ mov(r1, result_register());
+  // InvokeFunction requires the function in r1. Move it in there.
+  __ mov(r1, result_register());
   ParameterCount count(arg_count);
   __ InvokeFunction(r1, count, CALL_FUNCTION);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -3658,7 +3670,7 @@
     __ mov(r2, Operand(expr->name()));
     Handle<Code> ic =
         isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
     // Restore context register.
     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   } else {
@@ -3937,7 +3949,7 @@
   SetSourcePosition(expr->position());
 
   TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
   __ bind(&done);
 
   // Store the value returned in r0.
@@ -3968,7 +3980,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
           : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3985,7 +3997,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -4011,7 +4023,7 @@
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
     PrepareForBailout(expr, TOS_REG);
     context()->Plug(r0);
   } else if (proxy != NULL &&
@@ -4214,7 +4226,7 @@
       // Record position and call the compare IC.
       SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(op);
-      EmitCallIC(ic, &patch_site);
+      EmitCallIC(ic, &patch_site, expr->id());
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ cmp(r0, Operand(0));
       Split(cond, if_true, if_false, fall_through);
@@ -4276,9 +4288,12 @@
 }
 
 
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   RelocInfo::Mode mode,
+                                   unsigned ast_id) {
   ASSERT(mode == RelocInfo::CODE_TARGET ||
-         mode == RelocInfo::CODE_TARGET_CONTEXT);
+         mode == RelocInfo::CODE_TARGET_CONTEXT ||
+         mode == RelocInfo::CODE_TARGET_WITH_ID);
   Counters* counters = isolate()->counters();
   switch (ic->kind()) {
     case Code::LOAD_IC:
@@ -4295,12 +4310,19 @@
     default:
       break;
   }
-
-  __ Call(ic, mode);
+  if (mode == RelocInfo::CODE_TARGET_WITH_ID) {
+    ASSERT(ast_id != kNoASTId);
+    __ CallWithAstId(ic, mode, ast_id);
+  } else {
+    ASSERT(ast_id == kNoASTId);
+    __ Call(ic, mode);
+  }
 }
 
 
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   JumpPatchSite* patch_site,
+                                   unsigned ast_id) {
   Counters* counters = isolate()->counters();
   switch (ic->kind()) {
     case Code::LOAD_IC:
@@ -4318,7 +4340,11 @@
       break;
   }
 
-  __ Call(ic, RelocInfo::CODE_TARGET);
+  if (ast_id != kNoASTId) {
+    __ CallWithAstId(ic, RelocInfo::CODE_TARGET_WITH_ID, ast_id);
+  } else {
+    __ Call(ic, RelocInfo::CODE_TARGET);
+  }
   if (patch_site != NULL && patch_site->is_bound()) {
     patch_site->EmitPatchInfo();
   } else {
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 8acf7c2..db04f33 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -926,217 +926,6 @@
   __ TailCallExternalReference(ref, 2, 1);
 }
 
-// Returns the code marker, or the 0 if the code is not marked.
-static inline int InlinedICSiteMarker(Address address,
-                                      Address* inline_end_address) {
-  if (V8::UseCrankshaft()) return false;
-
-  // If the instruction after the call site is not the pseudo instruction nop1
-  // then this is not related to an inlined in-object property load. The nop1
-  // instruction is located just after the call to the IC in the deferred code
-  // handling the miss in the inlined code. After the nop1 instruction there is
-  // a branch instruction for jumping back from the deferred code.
-  Address address_after_call = address + Assembler::kCallTargetAddressOffset;
-  Instr instr_after_call = Assembler::instr_at(address_after_call);
-  int code_marker = MacroAssembler::GetCodeMarker(instr_after_call);
-
-  // A negative result means the code is not marked.
-  if (code_marker <= 0) return 0;
-
-  Address address_after_nop = address_after_call + Assembler::kInstrSize;
-  Instr instr_after_nop = Assembler::instr_at(address_after_nop);
-  // There may be some reg-reg move and frame merging code to skip over before
-  // the branch back from the DeferredReferenceGetKeyedValue code to the inlined
-  // code.
-  while (!Assembler::IsBranch(instr_after_nop)) {
-    address_after_nop += Assembler::kInstrSize;
-    instr_after_nop = Assembler::instr_at(address_after_nop);
-  }
-
-  // Find the end of the inlined code for handling the load.
-  int b_offset =
-      Assembler::GetBranchOffset(instr_after_nop) + Assembler::kPcLoadDelta;
-  ASSERT(b_offset < 0);  // Jumping back from deferred code.
-  *inline_end_address = address_after_nop + b_offset;
-
-  return code_marker;
-}
-
-
-bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
-  if (V8::UseCrankshaft()) return false;
-
-  // Find the end of the inlined code for handling the load if this is an
-  // inlined IC call site.
-  Address inline_end_address = 0;
-  if (InlinedICSiteMarker(address, &inline_end_address)
-      != Assembler::PROPERTY_ACCESS_INLINED) {
-    return false;
-  }
-
-  // Patch the offset of the property load instruction (ldr r0, [r1, #+XXX]).
-  // The immediate must be representable in 12 bits.
-  ASSERT((JSObject::kMaxInstanceSize - JSObject::kHeaderSize) < (1 << 12));
-  Address ldr_property_instr_address =
-      inline_end_address - Assembler::kInstrSize;
-  ASSERT(Assembler::IsLdrRegisterImmediate(
-      Assembler::instr_at(ldr_property_instr_address)));
-  Instr ldr_property_instr = Assembler::instr_at(ldr_property_instr_address);
-  ldr_property_instr = Assembler::SetLdrRegisterImmediateOffset(
-      ldr_property_instr, offset - kHeapObjectTag);
-  Assembler::instr_at_put(ldr_property_instr_address, ldr_property_instr);
-
-  // Indicate that code has changed.
-  CPU::FlushICache(ldr_property_instr_address, 1 * Assembler::kInstrSize);
-
-  // Patch the map check.
-  // For PROPERTY_ACCESS_INLINED, the load map instruction is generated
-  // 4 instructions before the end of the inlined code.
-  // See codgen-arm.cc CodeGenerator::EmitNamedLoad.
-  int ldr_map_offset = -4;
-  Address ldr_map_instr_address =
-      inline_end_address + ldr_map_offset * Assembler::kInstrSize;
-  Assembler::set_target_address_at(ldr_map_instr_address,
-                                   reinterpret_cast<Address>(map));
-  return true;
-}
-
-
-bool LoadIC::PatchInlinedContextualLoad(Address address,
-                                        Object* map,
-                                        Object* cell,
-                                        bool is_dont_delete) {
-  // Find the end of the inlined code for handling the contextual load if
-  // this is inlined IC call site.
-  Address inline_end_address = 0;
-  int marker = InlinedICSiteMarker(address, &inline_end_address);
-  if (!((marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT) ||
-        (marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE))) {
-    return false;
-  }
-  // On ARM we don't rely on the is_dont_delete argument as the hint is already
-  // embedded in the code marker.
-  bool marker_is_dont_delete =
-      marker == Assembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE;
-
-  // These are the offsets from the end of the inlined code.
-  // See codgen-arm.cc CodeGenerator::EmitNamedLoad.
-  int ldr_map_offset = marker_is_dont_delete ? -5: -8;
-  int ldr_cell_offset = marker_is_dont_delete ? -2: -5;
-  if (FLAG_debug_code && marker_is_dont_delete) {
-    // Three extra instructions were generated to check for the_hole_value.
-    ldr_map_offset -= 3;
-    ldr_cell_offset -= 3;
-  }
-  Address ldr_map_instr_address =
-      inline_end_address + ldr_map_offset * Assembler::kInstrSize;
-  Address ldr_cell_instr_address =
-      inline_end_address + ldr_cell_offset * Assembler::kInstrSize;
-
-  // Patch the map check.
-  Assembler::set_target_address_at(ldr_map_instr_address,
-                                   reinterpret_cast<Address>(map));
-  // Patch the cell address.
-  Assembler::set_target_address_at(ldr_cell_instr_address,
-                                   reinterpret_cast<Address>(cell));
-
-  return true;
-}
-
-
-bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
-  if (V8::UseCrankshaft()) return false;
-
-  // Find the end of the inlined code for the store if there is an
-  // inlined version of the store.
-  Address inline_end_address = 0;
-  if (InlinedICSiteMarker(address, &inline_end_address)
-      != Assembler::PROPERTY_ACCESS_INLINED) {
-    return false;
-  }
-
-  // Compute the address of the map load instruction.
-  Address ldr_map_instr_address =
-      inline_end_address -
-      (CodeGenerator::GetInlinedNamedStoreInstructionsAfterPatch() *
-       Assembler::kInstrSize);
-
-  // Update the offsets if initializing the inlined store. No reason
-  // to update the offsets when clearing the inlined version because
-  // it will bail out in the map check.
-  if (map != HEAP->null_value()) {
-    // Patch the offset in the actual store instruction.
-    Address str_property_instr_address =
-        ldr_map_instr_address + 3 * Assembler::kInstrSize;
-    Instr str_property_instr = Assembler::instr_at(str_property_instr_address);
-    ASSERT(Assembler::IsStrRegisterImmediate(str_property_instr));
-    str_property_instr = Assembler::SetStrRegisterImmediateOffset(
-        str_property_instr, offset - kHeapObjectTag);
-    Assembler::instr_at_put(str_property_instr_address, str_property_instr);
-
-    // Patch the offset in the add instruction that is part of the
-    // write barrier.
-    Address add_offset_instr_address =
-        str_property_instr_address + Assembler::kInstrSize;
-    Instr add_offset_instr = Assembler::instr_at(add_offset_instr_address);
-    ASSERT(Assembler::IsAddRegisterImmediate(add_offset_instr));
-    add_offset_instr = Assembler::SetAddRegisterImmediateOffset(
-        add_offset_instr, offset - kHeapObjectTag);
-    Assembler::instr_at_put(add_offset_instr_address, add_offset_instr);
-
-    // Indicate that code has changed.
-    CPU::FlushICache(str_property_instr_address, 2 * Assembler::kInstrSize);
-  }
-
-  // Patch the map check.
-  Assembler::set_target_address_at(ldr_map_instr_address,
-                                   reinterpret_cast<Address>(map));
-
-  return true;
-}
-
-
-bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
-  if (V8::UseCrankshaft()) return false;
-
-  Address inline_end_address = 0;
-  if (InlinedICSiteMarker(address, &inline_end_address)
-      != Assembler::PROPERTY_ACCESS_INLINED) {
-    return false;
-  }
-
-  // Patch the map check.
-  Address ldr_map_instr_address =
-      inline_end_address -
-      (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() *
-      Assembler::kInstrSize);
-  Assembler::set_target_address_at(ldr_map_instr_address,
-                                   reinterpret_cast<Address>(map));
-  return true;
-}
-
-
-bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
-  if (V8::UseCrankshaft()) return false;
-
-  // Find the end of the inlined code for handling the store if this is an
-  // inlined IC call site.
-  Address inline_end_address = 0;
-  if (InlinedICSiteMarker(address, &inline_end_address)
-      != Assembler::PROPERTY_ACCESS_INLINED) {
-    return false;
-  }
-
-  // Patch the map check.
-  Address ldr_map_instr_address =
-      inline_end_address -
-      (CodeGenerator::kInlinedKeyedStoreInstructionsAfterPatch *
-      Assembler::kInstrSize);
-  Assembler::set_target_address_at(ldr_map_instr_address,
-                                   reinterpret_cast<Address>(map));
-  return true;
-}
-
 
 Object* KeyedLoadIC_Miss(Arguments args);
 
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 3f1d15b..faf6404 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -61,22 +61,21 @@
 
 #ifdef DEBUG
 void LInstruction::VerifyCall() {
-  // Call instructions can use only fixed registers as
-  // temporaries and outputs because all registers
-  // are blocked by the calling convention.
-  // Inputs must use a fixed register.
+  // Call instructions can use only fixed registers as temporaries and
+  // outputs because all registers are blocked by the calling convention.
+  // Input operands must use a fixed register or use-at-start policy or
+  // a non-register policy.
   ASSERT(Output() == NULL ||
          LUnallocated::cast(Output())->HasFixedPolicy() ||
          !LUnallocated::cast(Output())->HasRegisterPolicy());
   for (UseIterator it(this); it.HasNext(); it.Advance()) {
-    LOperand* operand = it.Next();
-    ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
-           !LUnallocated::cast(operand)->HasRegisterPolicy());
+    LUnallocated* operand = LUnallocated::cast(it.Next());
+    ASSERT(operand->HasFixedPolicy() ||
+           operand->IsUsedAtStart());
   }
   for (TempIterator it(this); it.HasNext(); it.Advance()) {
-    LOperand* operand = it.Next();
-    ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
-           !LUnallocated::cast(operand)->HasRegisterPolicy());
+    LUnallocated* operand = LUnallocated::cast(it.Next());
+    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
   }
 }
 #endif
@@ -301,6 +300,13 @@
 }
 
 
+void LInvokeFunction::PrintDataTo(StringStream* stream) {
+  stream->Add("= ");
+  InputAt(0)->PrintTo(stream);
+  stream->Add(" #%d / ", arity());
+}
+
+
 void LCallKeyed::PrintDataTo(StringStream* stream) {
   stream->Add("[r2] #%d / ", arity());
 }
@@ -1212,6 +1218,14 @@
 }
 
 
+LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
+  LOperand* function = UseFixed(instr->function(), r1);
+  argument_count_ -= instr->argument_count();
+  LInvokeFunction* result = new LInvokeFunction(function);
+  return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
+}
+
+
 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
   BuiltinFunctionId op = instr->op();
   if (op == kMathLog || op == kMathSin || op == kMathCos) {
@@ -1946,6 +1960,13 @@
 }
 
 
+LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
+  LOperand* left = UseRegisterAtStart(instr->left());
+  LOperand* right = UseRegisterAtStart(instr->right());
+  return MarkAsCall(DefineFixed(new LStringAdd(left, right), r0), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
   LOperand* string = UseRegister(instr->string());
   LOperand* index = UseRegisterOrConstant(instr->index());
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 6da7c86..4add6bf 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -106,6 +106,7 @@
   V(InstanceOfAndBranch)                        \
   V(InstanceOfKnownGlobal)                      \
   V(Integer32ToDouble)                          \
+  V(InvokeFunction)                             \
   V(IsNull)                                     \
   V(IsNullAndBranch)                            \
   V(IsObject)                                   \
@@ -152,6 +153,7 @@
   V(StoreKeyedSpecializedArrayElement)          \
   V(StoreNamedField)                            \
   V(StoreNamedGeneric)                          \
+  V(StringAdd)                                  \
   V(StringCharCodeAt)                           \
   V(StringCharFromCode)                         \
   V(StringLength)                               \
@@ -1412,6 +1414,23 @@
 };
 
 
+class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LInvokeFunction(LOperand* function) {
+    inputs_[0] = function;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
+  DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
+
+  LOperand* function() { return inputs_[0]; }
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  int arity() const { return hydrogen()->argument_count() - 1; }
+};
+
+
 class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LCallKeyed(LOperand* key) {
@@ -1706,6 +1725,22 @@
 };
 
 
+class LStringAdd: public LTemplateInstruction<1, 2, 0> {
+ public:
+  LStringAdd(LOperand* left, LOperand* right) {
+    inputs_[0] = left;
+    inputs_[1] = right;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+  DECLARE_HYDROGEN_ACCESSOR(StringAdd)
+
+  LOperand* left() { return inputs_[0]; }
+  LOperand* right() { return inputs_[1]; }
+};
+
+
+
 class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
  public:
   LStringCharCodeAt(LOperand* string, LOperand* index) {
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 4912449..2d415cb 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -91,7 +91,7 @@
 
 void LCodeGen::FinishCode(Handle<Code> code) {
   ASSERT(is_done());
-  code->set_stack_slots(StackSlotCount());
+  code->set_stack_slots(GetStackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
   PopulateDeoptimizationData(code);
   Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
@@ -149,7 +149,7 @@
   __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.
 
   // Reserve space for the stack slots needed by the code.
-  int slots = StackSlotCount();
+  int slots = GetStackSlotCount();
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ mov(r0, Operand(slots));
@@ -263,7 +263,7 @@
 
 bool LCodeGen::GenerateSafepointTable() {
   ASSERT(is_done());
-  safepoints_.Emit(masm(), StackSlotCount());
+  safepoints_.Emit(masm(), GetStackSlotCount());
   return !is_aborted();
 }
 
@@ -459,7 +459,7 @@
     translation->StoreDoubleStackSlot(op->index());
   } else if (op->IsArgument()) {
     ASSERT(is_tagged);
-    int src_index = StackSlotCount() + op->index();
+    int src_index = GetStackSlotCount() + op->index();
     translation->StoreStackSlot(src_index);
   } else if (op->IsRegister()) {
     Register reg = ToRegister(op);
@@ -2180,7 +2180,7 @@
     __ push(r0);
     __ CallRuntime(Runtime::kTraceExit, 1);
   }
-  int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
+  int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
   __ mov(sp, fp);
   __ ldm(ia_w, sp, fp.bit() | lr.bit());
   __ add(sp, sp, Operand(sp_delta));
@@ -2861,9 +2861,49 @@
 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
   DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
   Register result = ToRegister(instr->result());
-  Register scratch1 = scratch0();
-  Register scratch2 = result;
-  __ EmitVFPTruncate(kRoundToNearest,
+  Register scratch1 = result;
+  Register scratch2 = scratch0();
+  Label done, check_sign_on_zero;
+
+  // Extract exponent bits.
+  __ vmov(scratch1, input.high());
+  __ ubfx(scratch2,
+          scratch1,
+          HeapNumber::kExponentShift,
+          HeapNumber::kExponentBits);
+
+  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
+  __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
+  __ mov(result, Operand(0), LeaveCC, le);
+  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+    __ b(le, &check_sign_on_zero);
+  } else {
+    __ b(le, &done);
+  }
+
+  // The following conversion will not work with numbers
+  // outside of ]-2^32, 2^32[.
+  __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
+  DeoptimizeIf(ge, instr->environment());
+
+  // Save the original sign for later comparison.
+  __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));
+
+  __ vmov(double_scratch0(), 0.5);
+  __ vadd(input, input, double_scratch0());
+
+  // Check sign of the result: if the sign changed, the input
+  // value was in ]-0.5, 0[ and the result should be -0.
+  __ vmov(scratch1, input.high());
+  __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
+  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+    DeoptimizeIf(mi, instr->environment());
+  } else {
+    __ mov(result, Operand(0), LeaveCC, mi);
+    __ b(mi, &done);
+  }
+
+  __ EmitVFPTruncate(kRoundToMinusInf,
                      double_scratch0().low(),
                      input,
                      scratch1,
@@ -2873,14 +2913,14 @@
 
   if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
     // Test for -0.
-    Label done;
     __ cmp(result, Operand(0));
     __ b(ne, &done);
+    __ bind(&check_sign_on_zero);
     __ vmov(scratch1, input.high());
     __ tst(scratch1, Operand(HeapNumber::kSignMask));
     DeoptimizeIf(ne, instr->environment());
-    __ bind(&done);
   }
+  __ bind(&done);
 }
 
 
@@ -3025,6 +3065,21 @@
 }
 
 
+void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
+  ASSERT(ToRegister(instr->function()).is(r1));
+  ASSERT(instr->HasPointerMap());
+  ASSERT(instr->HasDeoptimizationEnvironment());
+  LPointerMap* pointers = instr->pointer_map();
+  LEnvironment* env = instr->deoptimization_environment();
+  RecordPosition(pointers->position());
+  RegisterEnvironmentForDeoptimization(env);
+  SafepointGenerator generator(this, pointers, env->deoptimization_index());
+  ParameterCount count(instr->arity());
+  __ InvokeFunction(r1, count, CALL_FUNCTION, &generator);
+  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+}
+
+
 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
 
@@ -3223,6 +3278,14 @@
 }
 
 
+void LCodeGen::DoStringAdd(LStringAdd* instr) {
+  __ push(ToRegister(instr->left()));
+  __ push(ToRegister(instr->right()));
+  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+}
+
+
 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
   class DeferredStringCharCodeAt: public LDeferredCode {
    public:
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 8a4ea27..1110ea6 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -158,8 +158,8 @@
                        Register temporary,
                        Register temporary2);
 
-  int StackSlotCount() const { return chunk()->spill_slot_count(); }
-  int ParameterCount() const { return scope()->num_parameters(); }
+  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
+  int GetParameterCount() const { return scope()->num_parameters(); }
 
   void Abort(const char* format, ...);
   void Comment(const char* format, ...);
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 6a095d3..c800ffc 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -148,8 +148,9 @@
 }
 
 
-void MacroAssembler::Call(
-    intptr_t target, RelocInfo::Mode rmode, Condition cond) {
+void MacroAssembler::Call(intptr_t target,
+                          RelocInfo::Mode rmode,
+                          Condition cond) {
   // Block constant pool for the call instruction sequence.
   BlockConstPoolScope block_const_pool(this);
 #ifdef DEBUG
@@ -214,8 +215,31 @@
 }
 
 
-void MacroAssembler::Call(
-    Handle<Code> code, RelocInfo::Mode rmode, Condition cond) {
+void MacroAssembler::CallWithAstId(Handle<Code> code,
+                                   RelocInfo::Mode rmode,
+                                   unsigned ast_id,
+                                   Condition cond) {
+#ifdef DEBUG
+  int pre_position = pc_offset();
+#endif
+
+  ASSERT(rmode == RelocInfo::CODE_TARGET_WITH_ID);
+  ASSERT(ast_id != kNoASTId);
+  ASSERT(ast_id_for_reloc_info_ == kNoASTId);
+  ast_id_for_reloc_info_ = ast_id;
+  // 'code' is always generated ARM code, never THUMB code
+  Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
+
+#ifdef DEBUG
+  int post_position = pc_offset();
+  CHECK_EQ(pre_position + CallSize(code, rmode, cond), post_position);
+#endif
+}
+
+
+void MacroAssembler::Call(Handle<Code> code,
+                          RelocInfo::Mode rmode,
+                          Condition cond) {
 #ifdef DEBUG
   int pre_position = pc_offset();
 #endif
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index ab5efb0..1ca16ae 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -105,7 +105,13 @@
   int CallSize(byte* target, RelocInfo::Mode rmode, Condition cond = al);
   void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
   int CallSize(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
-  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
+  void Call(Handle<Code> code,
+            RelocInfo::Mode rmode,
+            Condition cond = al);
+  void CallWithAstId(Handle<Code> code,
+                     RelocInfo::Mode rmode,
+                     unsigned ast_id,
+                     Condition cond = al);
   void Ret(Condition cond = al);
 
   // Emit code to discard a non-negative number of pointer-sized elements
@@ -958,7 +964,9 @@
 
   void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
   int CallSize(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
-  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
+  void Call(intptr_t target,
+            RelocInfo::Mode rmode,
+            Condition cond = al);
 
   // Helper functions for generating invokes.
   void InvokePrologue(const ParameterCount& expected,
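
The new CallWithAstId entry point relies on a small set-once/consume-once protocol: the AST id is parked in ast_id_for_reloc_info_ before the call is emitted, and the next CODE_TARGET_WITH_ID relocation record is expected to pick it up and clear it. The following is a minimal standalone sketch of that handoff, assuming that consumption step; the SketchAssembler class and its EmitCall helper are hypothetical and only mirror the names used in the patch.

// Minimal standalone sketch (not part of the patch) of the set-once/consume-once
// handoff that CallWithAstId relies on.
#include <cassert>
#include <cstdio>

const unsigned kNoASTId = static_cast<unsigned>(-1);

class SketchAssembler {
 public:
  SketchAssembler() : ast_id_for_reloc_info_(kNoASTId) {}

  // Analogous to MacroAssembler::CallWithAstId: record the id, then emit the
  // call; an id must not already be pending.
  void CallWithAstId(unsigned ast_id) {
    assert(ast_id != kNoASTId);
    assert(ast_id_for_reloc_info_ == kNoASTId);
    ast_id_for_reloc_info_ = ast_id;
    EmitCall();
  }

 private:
  // Stands in for the point where the assembler writes the
  // CODE_TARGET_WITH_ID relocation info: the pending id is consumed exactly
  // once and then reset.
  void EmitCall() {
    assert(ast_id_for_reloc_info_ != kNoASTId);
    std::printf("call with ast id %u\n", ast_id_for_reloc_info_);
    ast_id_for_reloc_info_ = kNoASTId;
  }

  unsigned ast_id_for_reloc_info_;
};

int main() {
  SketchAssembler masm;
  masm.CallWithAstId(42);  // id recorded, emitted, and cleared
  masm.CallWithAstId(43);  // ok again, because the slot was reset
  return 0;
}
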
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index ccd79d3..da554c2 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -67,6 +67,7 @@
   Simulator* sim_;
 
   int32_t GetRegisterValue(int regnum);
+  double GetRegisterPairDoubleValue(int regnum);
   double GetVFPDoubleRegisterValue(int regnum);
   bool GetValue(const char* desc, int32_t* value);
   bool GetVFPSingleValue(const char* desc, float* value);
@@ -168,6 +169,11 @@
 }
 
 
+double ArmDebugger::GetRegisterPairDoubleValue(int regnum) {
+  return sim_->get_double_from_register_pair(regnum);
+}
+
+
 double ArmDebugger::GetVFPDoubleRegisterValue(int regnum) {
   return sim_->get_double_from_d_register(regnum);
 }
@@ -305,14 +311,22 @@
         // Leave the debugger shell.
         done = true;
       } else if ((strcmp(cmd, "p") == 0) || (strcmp(cmd, "print") == 0)) {
-        if (argc == 2) {
+        if (argc == 2 || (argc == 3 && strcmp(arg2, "fp") == 0)) {
           int32_t value;
           float svalue;
           double dvalue;
           if (strcmp(arg1, "all") == 0) {
             for (int i = 0; i < kNumRegisters; i++) {
               value = GetRegisterValue(i);
-              PrintF("%3s: 0x%08x %10d\n", Registers::Name(i), value, value);
+              PrintF("%3s: 0x%08x %10d", Registers::Name(i), value, value);
+              if ((argc == 3 && strcmp(arg2, "fp") == 0) &&
+                  i < 8 &&
+                  (i % 2) == 0) {
+                dvalue = GetRegisterPairDoubleValue(i);
+                PrintF(" (%f)\n", dvalue);
+              } else {
+                PrintF("\n");
+              }
             }
             for (int i = 0; i < kNumVFPDoubleRegisters; i++) {
               dvalue = GetVFPDoubleRegisterValue(i);
@@ -550,6 +564,7 @@
         PrintF("print <register>\n");
         PrintF("  print register content (alias 'p')\n");
         PrintF("  use register name 'all' to print all registers\n");
+        PrintF("  add argument 'fp' to print register pair double values\n");
         PrintF("printobject <register>\n");
         PrintF("  print an object from a register (alias 'po')\n");
         PrintF("flags\n");
@@ -873,6 +888,19 @@
 }
 
 
+double Simulator::get_double_from_register_pair(int reg) {
+  ASSERT((reg >= 0) && (reg < num_registers) && ((reg % 2) == 0));
+
+  double dm_val = 0.0;
+  // Read the bits of two consecutive entries of the integer registers_[]
+  // array into the double precision floating point value and return it.
+  char buffer[2 * sizeof(registers_[0])];
+  memcpy(buffer, &registers_[reg], 2 * sizeof(registers_[0]));
+  memcpy(&dm_val, buffer, 2 * sizeof(registers_[0]));
+  return dm_val;
+}
+
+
 void Simulator::set_dw_register(int dreg, const int* dbl) {
   ASSERT((dreg >= 0) && (dreg < num_d_registers));
   registers_[dreg] = dbl[0];
diff --git a/src/arm/simulator-arm.h b/src/arm/simulator-arm.h
index 436d1c9..a16cae5 100644
--- a/src/arm/simulator-arm.h
+++ b/src/arm/simulator-arm.h
@@ -155,6 +155,7 @@
   // instruction.
   void set_register(int reg, int32_t value);
   int32_t get_register(int reg) const;
+  double get_double_from_register_pair(int reg);
   void set_dw_register(int dreg, const int* dbl);
 
   // Support for VFP.
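
Simulator::get_double_from_register_pair reinterprets the raw bits of two consecutive 32-bit core registers as one IEEE-754 double, which is what the debugger's new "print all fp" option uses to display register pairs. Below is a minimal standalone sketch of that reinterpretation, assuming a little-endian host; the registers array here is hypothetical test data, not the simulator's state.

// Minimal standalone sketch (not part of the patch) of the bit-level
// reinterpretation done by Simulator::get_double_from_register_pair.
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // Pretend r0/r1 hold the two halves of the IEEE-754 encoding of 1.5
  // (0x3FF8000000000000).  On a little-endian host the low word comes first.
  int32_t registers[2] = {0x00000000, 0x3FF80000};

  double value = 0.0;
  // memcpy avoids the undefined behaviour of type-punning through pointers.
  std::memcpy(&value, &registers[0], 2 * sizeof(registers[0]));

  std::printf("%f\n", value);  // prints 1.500000 on a little-endian host
  return 0;
}
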
diff --git a/src/assembler.cc b/src/assembler.cc
index ca30e19..77c7b68 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -30,7 +30,7 @@
 
 // The original source code covered by the above license above has been
 // modified significantly by Google Inc.
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 
 #include "v8.h"
 
@@ -87,58 +87,85 @@
 // -----------------------------------------------------------------------------
 // Implementation of RelocInfoWriter and RelocIterator
 //
+// Relocation information is written backwards in memory, from high addresses
+// towards low addresses, byte by byte.  Therefore, in the encodings listed
+// below, the first byte listed is at the highest address, and successive
+// bytes in the record are at progressively lower addresses.
+//
 // Encoding
 //
 // The most common modes are given single-byte encodings.  Also, it is
 // easy to identify the type of reloc info and skip unwanted modes in
 // an iteration.
 //
-// The encoding relies on the fact that there are less than 14
-// different relocation modes.
+// The encoding relies on the fact that there are fewer than 14
+// different non-compactly encoded relocation modes.
 //
-// embedded_object:    [6 bits pc delta] 00
+// The first byte of a relocation record has a tag in its low 2 bits.  The
+// record schemes below depend on that low tag and, for some records, on
+// optional higher tags.
 //
-// code_taget:         [6 bits pc delta] 01
+// Low tag:
+//   00: embedded_object:      [6-bit pc delta] 00
 //
-// position:           [6 bits pc delta] 10,
-//                     [7 bits signed data delta] 0
+//   01: code_target:          [6-bit pc delta] 01
 //
-// statement_position: [6 bits pc delta] 10,
-//                     [7 bits signed data delta] 1
+//   10: short_data_record:    [6-bit pc delta] 10 followed by
+//                             [6-bit data delta] [2-bit data type tag]
 //
-// any nondata mode:   00 [4 bits rmode] 11,  // rmode: 0..13 only
-//                     00 [6 bits pc delta]
+//   11: long_record           [2-bit high tag][4 bit middle_tag] 11
+//                             followed by variable data depending on type.
 //
-// pc-jump:            00 1111 11,
-//                     00 [6 bits pc delta]
+//  2-bit data type tags, used in short_data_record and data_jump long_record:
+//   code_target_with_id: 00
+//   position:            01
+//   statement_position:  10
+//   comment:             11 (not used in short_data_record)
 //
-// pc-jump:            01 1111 11,
-// (variable length)   7 - 26 bit pc delta, written in chunks of 7
-//                     bits, the lowest 7 bits written first.
+//  Long record format:
+//    4-bit middle_tag:
+//      0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
+//         (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
+//          and is between 0000 and 1100)
+//        The format is:
+//                              00 [4 bit middle_tag] 11 followed by
+//                              00 [6 bit pc delta]
 //
-// data-jump + pos:    00 1110 11,
-//                     signed intptr_t, lowest byte written first
+//      1101: not used (would allow one more relocation mode to be added)
+//      1110: long_data_record
+//        The format is:       [2-bit data_type_tag] 1110 11
+//                             signed intptr_t, lowest byte written first
+//                             (except data_type code_target_with_id, which
+//                             is followed by a signed int, not intptr_t.)
 //
-// data-jump + st.pos: 01 1110 11,
-//                     signed intptr_t, lowest byte written first
-//
-// data-jump + comm.:  10 1110 11,
-//                     signed intptr_t, lowest byte written first
-//
+//      1111: long_pc_jump
+//        The format is:
+//          pc-jump:             00 1111 11,
+//                               00 [6 bits pc delta]
+//        or
+//          pc-jump (variable length):
+//                               01 1111 11,
+//                               [7 bits data] 0
+//                                  ...
+//                               [7 bits data] 1
+//               (Bits 6..31 of pc delta, with leading zeroes
+//                dropped, and last non-zero chunk tagged with 1.)
+
+
 const int kMaxRelocModes = 14;
 
 const int kTagBits = 2;
 const int kTagMask = (1 << kTagBits) - 1;
 const int kExtraTagBits = 4;
-const int kPositionTypeTagBits = 1;
-const int kSmallDataBits = kBitsPerByte - kPositionTypeTagBits;
+const int kLocatableTypeTagBits = 2;
+const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;
 
 const int kEmbeddedObjectTag = 0;
 const int kCodeTargetTag = 1;
-const int kPositionTag = 2;
+const int kLocatableTag = 2;
 const int kDefaultTag = 3;
 
-const int kPCJumpTag = (1 << kExtraTagBits) - 1;
+const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;
 
 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
@@ -152,11 +179,12 @@
 const int kLastChunkTag = 1;
 
 
-const int kDataJumpTag = kPCJumpTag - 1;
+const int kDataJumpExtraTag = kPCJumpExtraTag - 1;
 
-const int kNonstatementPositionTag = 0;
-const int kStatementPositionTag = 1;
-const int kCommentTag = 2;
+const int kCodeWithIdTag = 0;
+const int kNonstatementPositionTag = 1;
+const int kStatementPositionTag = 2;
+const int kCommentTag = 3;
 
 
 uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
@@ -164,7 +192,7 @@
   // Otherwise write a variable length PC jump for the bits that do
   // not fit in the kSmallPCDeltaBits bits.
   if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
-  WriteExtraTag(kPCJumpTag, kVariableLengthPCJumpTopTag);
+  WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
   uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
   ASSERT(pc_jump > 0);
   // Write kChunkBits size chunks of the pc_jump.
@@ -187,7 +215,7 @@
 
 
 void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
-  *--pos_ = static_cast<byte>(data_delta << kPositionTypeTagBits | tag);
+  *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
 }
 
 
@@ -206,11 +234,20 @@
 }
 
 
+void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
+  WriteExtraTag(kDataJumpExtraTag, top_tag);
+  for (int i = 0; i < kIntSize; i++) {
+    *--pos_ = static_cast<byte>(data_delta);
+    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
+    data_delta = data_delta >> kBitsPerByte;
+  }
+}
+
 void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
-  WriteExtraTag(kDataJumpTag, top_tag);
+  WriteExtraTag(kDataJumpExtraTag, top_tag);
   for (int i = 0; i < kIntptrSize; i++) {
     *--pos_ = static_cast<byte>(data_delta);
-  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
+    // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
     data_delta = data_delta >> kBitsPerByte;
   }
 }
@@ -221,7 +258,8 @@
   byte* begin_pos = pos_;
 #endif
   ASSERT(rinfo->pc() - last_pc_ >= 0);
-  ASSERT(RelocInfo::NUMBER_OF_MODES <= kMaxRelocModes);
+  ASSERT(RelocInfo::NUMBER_OF_MODES - RelocInfo::LAST_COMPACT_ENUM <=
+         kMaxRelocModes);
   // Use unsigned delta-encoding for pc.
   uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
   RelocInfo::Mode rmode = rinfo->rmode();
@@ -232,35 +270,48 @@
   } else if (rmode == RelocInfo::CODE_TARGET) {
     WriteTaggedPC(pc_delta, kCodeTargetTag);
     ASSERT(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
-  } else if (RelocInfo::IsPosition(rmode)) {
-    // Use signed delta-encoding for data.
-    intptr_t data_delta = rinfo->data() - last_data_;
-    int pos_type_tag = rmode == RelocInfo::POSITION ? kNonstatementPositionTag
-                                                    : kStatementPositionTag;
-    // Check if data is small enough to fit in a tagged byte.
-    // We cannot use is_intn because data_delta is not an int32_t.
-    if (data_delta >= -(1 << (kSmallDataBits-1)) &&
-        data_delta < 1 << (kSmallDataBits-1)) {
-      WriteTaggedPC(pc_delta, kPositionTag);
-      WriteTaggedData(data_delta, pos_type_tag);
-      last_data_ = rinfo->data();
+  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
+    // Use signed delta-encoding for id.
+    ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
+    int id_delta = static_cast<int>(rinfo->data()) - last_id_;
+    // Check if delta is small enough to fit in a tagged byte.
+    if (is_intn(id_delta, kSmallDataBits)) {
+      WriteTaggedPC(pc_delta, kLocatableTag);
+      WriteTaggedData(id_delta, kCodeWithIdTag);
     } else {
       // Otherwise, use costly encoding.
-      WriteExtraTaggedPC(pc_delta, kPCJumpTag);
-      WriteExtraTaggedData(data_delta, pos_type_tag);
-      last_data_ = rinfo->data();
+      WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
+      WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
     }
+    last_id_ = static_cast<int>(rinfo->data());
+  } else if (RelocInfo::IsPosition(rmode)) {
+    // Use signed delta-encoding for position.
+    ASSERT(static_cast<int>(rinfo->data()) == rinfo->data());
+    int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
+    int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
+                                                      : kStatementPositionTag;
+    // Check if delta is small enough to fit in a tagged byte.
+    if (is_intn(pos_delta, kSmallDataBits)) {
+      WriteTaggedPC(pc_delta, kLocatableTag);
+      WriteTaggedData(pos_delta, pos_type_tag);
+    } else {
+      // Otherwise, use costly encoding.
+      WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
+      WriteExtraTaggedIntData(pos_delta, pos_type_tag);
+    }
+    last_position_ = static_cast<int>(rinfo->data());
   } else if (RelocInfo::IsComment(rmode)) {
     // Comments are normally not generated, so we use the costly encoding.
-    WriteExtraTaggedPC(pc_delta, kPCJumpTag);
-    WriteExtraTaggedData(rinfo->data() - last_data_, kCommentTag);
-    last_data_ = rinfo->data();
+    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
+    WriteExtraTaggedData(rinfo->data(), kCommentTag);
     ASSERT(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
   } else {
+    ASSERT(rmode > RelocInfo::LAST_COMPACT_ENUM);
+    int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
     // For all other modes we simply use the mode as the extra tag.
     // None of these modes need a data component.
-    ASSERT(rmode < kPCJumpTag && rmode < kDataJumpTag);
-    WriteExtraTaggedPC(pc_delta, rmode);
+    ASSERT(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
+    WriteExtraTaggedPC(pc_delta, saved_mode);
   }
   last_pc_ = rinfo->pc();
 #ifdef DEBUG
@@ -294,12 +345,32 @@
 }
 
 
+void RelocIterator::AdvanceReadId() {
+  int x = 0;
+  for (int i = 0; i < kIntSize; i++) {
+    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
+  }
+  last_id_ += x;
+  rinfo_.data_ = last_id_;
+}
+
+
+void RelocIterator::AdvanceReadPosition() {
+  int x = 0;
+  for (int i = 0; i < kIntSize; i++) {
+    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
+  }
+  last_position_ += x;
+  rinfo_.data_ = last_position_;
+}
+
+
 void RelocIterator::AdvanceReadData() {
   intptr_t x = 0;
   for (int i = 0; i < kIntptrSize; i++) {
     x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
   }
-  rinfo_.data_ += x;
+  rinfo_.data_ = x;
 }
 
 
@@ -319,27 +390,33 @@
 }
 
 
-inline int RelocIterator::GetPositionTypeTag() {
-  return *pos_ & ((1 << kPositionTypeTagBits) - 1);
+inline int RelocIterator::GetLocatableTypeTag() {
+  return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
 }
 
 
-inline void RelocIterator::ReadTaggedData() {
+inline void RelocIterator::ReadTaggedId() {
   int8_t signed_b = *pos_;
   // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
-  rinfo_.data_ += signed_b >> kPositionTypeTagBits;
+  last_id_ += signed_b >> kLocatableTypeTagBits;
+  rinfo_.data_ = last_id_;
 }
 
 
-inline RelocInfo::Mode RelocIterator::DebugInfoModeFromTag(int tag) {
-  if (tag == kStatementPositionTag) {
-    return RelocInfo::STATEMENT_POSITION;
-  } else if (tag == kNonstatementPositionTag) {
-    return RelocInfo::POSITION;
-  } else {
-    ASSERT(tag == kCommentTag);
-    return RelocInfo::COMMENT;
-  }
+inline void RelocIterator::ReadTaggedPosition() {
+  int8_t signed_b = *pos_;
+  // Signed right shift is arithmetic shift.  Tested in test-utils.cc.
+  last_position_ += signed_b >> kLocatableTypeTagBits;
+  rinfo_.data_ = last_position_;
+}
+
+
+static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
+  ASSERT(tag == kNonstatementPositionTag ||
+         tag == kStatementPositionTag);
+  return (tag == kNonstatementPositionTag) ?
+         RelocInfo::POSITION :
+         RelocInfo::STATEMENT_POSITION;
 }
 
 
@@ -358,37 +435,64 @@
     } else if (tag == kCodeTargetTag) {
       ReadTaggedPC();
       if (SetMode(RelocInfo::CODE_TARGET)) return;
-    } else if (tag == kPositionTag) {
+    } else if (tag == kLocatableTag) {
       ReadTaggedPC();
       Advance();
-      // Check if we want source positions.
-      if (mode_mask_ & RelocInfo::kPositionMask) {
-        ReadTaggedData();
-        if (SetMode(DebugInfoModeFromTag(GetPositionTypeTag()))) return;
+      int locatable_tag = GetLocatableTypeTag();
+      if (locatable_tag == kCodeWithIdTag) {
+        if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
+          ReadTaggedId();
+          return;
+        }
+      } else {
+        // Compact encoding is never used for comments,
+        // so it must be a position.
+        ASSERT(locatable_tag == kNonstatementPositionTag ||
+               locatable_tag == kStatementPositionTag);
+        if (mode_mask_ & RelocInfo::kPositionMask) {
+          ReadTaggedPosition();
+          if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
+        }
       }
     } else {
       ASSERT(tag == kDefaultTag);
       int extra_tag = GetExtraTag();
-      if (extra_tag == kPCJumpTag) {
+      if (extra_tag == kPCJumpExtraTag) {
         int top_tag = GetTopTag();
         if (top_tag == kVariableLengthPCJumpTopTag) {
           AdvanceReadVariableLengthPCJump();
         } else {
           AdvanceReadPC();
         }
-      } else if (extra_tag == kDataJumpTag) {
-        // Check if we want debug modes (the only ones with data).
-        if (mode_mask_ & RelocInfo::kDebugMask) {
-          int top_tag = GetTopTag();
-          AdvanceReadData();
-          if (SetMode(DebugInfoModeFromTag(top_tag))) return;
+      } else if (extra_tag == kDataJumpExtraTag) {
+        int locatable_tag = GetTopTag();
+        if (locatable_tag == kCodeWithIdTag) {
+          if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
+            AdvanceReadId();
+            return;
+          }
+          Advance(kIntSize);
+        } else if (locatable_tag != kCommentTag) {
+          ASSERT(locatable_tag == kNonstatementPositionTag ||
+                 locatable_tag == kStatementPositionTag);
+          if (mode_mask_ & RelocInfo::kPositionMask) {
+            AdvanceReadPosition();
+            if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
+          } else {
+            Advance(kIntSize);
+          }
         } else {
-          // Otherwise, just skip over the data.
+          ASSERT(locatable_tag == kCommentTag);
+          if (SetMode(RelocInfo::COMMENT)) {
+            AdvanceReadData();
+            return;
+          }
           Advance(kIntptrSize);
         }
       } else {
         AdvanceReadPC();
-        if (SetMode(static_cast<RelocInfo::Mode>(extra_tag))) return;
+        int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
+        if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
       }
     }
   }
@@ -404,6 +508,8 @@
   end_ = code->relocation_start();
   done_ = false;
   mode_mask_ = mode_mask;
+  last_id_ = 0;
+  last_position_ = 0;
   if (mode_mask_ == 0) pos_ = end_;
   next();
 }
@@ -417,6 +523,8 @@
   end_ = pos_ - desc.reloc_size;
   done_ = false;
   mode_mask_ = mode_mask;
+  last_id_ = 0;
+  last_position_ = 0;
   if (mode_mask_ == 0) pos_ = end_;
   next();
 }
@@ -444,6 +552,8 @@
       return "debug break";
     case RelocInfo::CODE_TARGET:
       return "code target";
+    case RelocInfo::CODE_TARGET_WITH_ID:
+      return "code target with id";
     case RelocInfo::GLOBAL_PROPERTY_CELL:
       return "global property cell";
     case RelocInfo::RUNTIME_ENTRY:
@@ -490,6 +600,9 @@
     Code* code = Code::GetCodeFromTargetAddress(target_address());
     PrintF(out, " (%s)  (%p)", Code::Kind2String(code->kind()),
            target_address());
+    if (rmode_ == CODE_TARGET_WITH_ID) {
+      PrintF(out, " (id=%d)", static_cast<int>(data_));
+    }
   } else if (IsPosition(rmode_)) {
     PrintF(out, "  (%" V8_PTR_PREFIX "d)", data());
   } else if (rmode_ == RelocInfo::RUNTIME_ENTRY &&
@@ -523,6 +636,7 @@
 #endif
     case CONSTRUCT_CALL:
     case CODE_TARGET_CONTEXT:
+    case CODE_TARGET_WITH_ID:
     case CODE_TARGET: {
       // convert inline target address to code object
       Address addr = target_address();
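
To make the new compact encodings concrete, here is a minimal standalone sketch of the two-byte short data record described in the encoding comment: the first byte packs a 6-bit pc delta over the 2-bit kLocatableTag, and the second packs a 6-bit signed delta (an AST-id or position delta) over a 2-bit data type tag. The constants mirror the patch; the encode/decode steps are a simplified stand-in for WriteTaggedPC/WriteTaggedData and the iterator's ReadTagged* helpers, not the actual implementations.

// Minimal standalone sketch (not part of the patch) of a short data record.
#include <cassert>
#include <cstdint>
#include <cstdio>

const int kTagBits = 2;
const int kTagMask = (1 << kTagBits) - 1;
const int kLocatableTypeTagBits = 2;
const int kLocatableTag = 2;   // low tag marking a short data record
const int kCodeWithIdTag = 0;  // data type tag for CODE_TARGET_WITH_ID

int main() {
  uint32_t pc_delta = 17;  // must fit in kSmallPCDeltaBits (6) for this form
  int id_delta = -3;       // signed, must fit in kSmallDataBits (6)

  // Encode: shift the payload over the tag bits.  The shift is done on the
  // unsigned representation to keep the sketch free of signed-shift pitfalls.
  uint8_t byte0 = static_cast<uint8_t>(pc_delta << kTagBits | kLocatableTag);
  uint8_t byte1 = static_cast<uint8_t>(
      (static_cast<uint32_t>(id_delta) << kLocatableTypeTagBits) |
      kCodeWithIdTag);

  // Decode, mirroring the iterator side.
  assert((byte0 & kTagMask) == kLocatableTag);
  uint32_t decoded_pc_delta = byte0 >> kTagBits;
  int type_tag = byte1 & ((1 << kLocatableTypeTagBits) - 1);
  // As in the patch, recovering the signed delta relies on signed right shift
  // being an arithmetic shift.
  int decoded_id_delta = static_cast<int8_t>(byte1) >> kLocatableTypeTagBits;

  std::printf("pc_delta=%u type_tag=%d id_delta=%d\n",
              static_cast<unsigned>(decoded_pc_delta), type_tag,
              decoded_id_delta);
  assert(decoded_pc_delta == pc_delta);
  assert(type_tag == kCodeWithIdTag);
  assert(decoded_id_delta == id_delta);
  return 0;
}
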
diff --git a/src/assembler.h b/src/assembler.h
index e8cecc3..918de62 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -42,7 +42,7 @@
 namespace v8 {
 namespace internal {
 
-
+const unsigned kNoASTId = -1;
 // -----------------------------------------------------------------------------
 // Platform independent assembler base class.
 
@@ -209,10 +209,11 @@
 
   enum Mode {
     // Please note the order is important (see IsCodeTarget, IsGCRelocMode).
+    CODE_TARGET,  // Code target that is not one of the specific kinds below.
+    CODE_TARGET_WITH_ID,
     CONSTRUCT_CALL,  // code target that is a call to a JavaScript constructor.
     CODE_TARGET_CONTEXT,  // Code target used for contextual loads and stores.
     DEBUG_BREAK,  // Code target for the debugger statement.
-    CODE_TARGET,  // Code target which is not any of the above.
     EMBEDDED_OBJECT,
     GLOBAL_PROPERTY_CELL,
 
@@ -228,10 +229,12 @@
 
     // add more as needed
     // Pseudo-types
-    NUMBER_OF_MODES,  // must be no greater than 14 - see RelocInfoWriter
+    NUMBER_OF_MODES,  // There are at most 14 modes with noncompact encoding.
     NONE,  // never recorded
-    LAST_CODE_ENUM = CODE_TARGET,
-    LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL
+    LAST_CODE_ENUM = DEBUG_BREAK,
+    LAST_GCED_ENUM = GLOBAL_PROPERTY_CELL,
+    // Modes <= LAST_COMPACT_ENUM are guaranteed to have compact encoding.
+    LAST_COMPACT_ENUM = CODE_TARGET_WITH_ID
   };
 
 
@@ -361,7 +364,8 @@
 
   static const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1;
   static const int kPositionMask = 1 << POSITION | 1 << STATEMENT_POSITION;
-  static const int kDebugMask = kPositionMask | 1 << COMMENT;
+  static const int kDataMask =
+      (1 << CODE_TARGET_WITH_ID) | kPositionMask | (1 << COMMENT);
   static const int kApplyMask;  // Modes affected by apply. Depends on arch.
 
  private:
@@ -380,9 +384,14 @@
 // lower addresses.
 class RelocInfoWriter BASE_EMBEDDED {
  public:
-  RelocInfoWriter() : pos_(NULL), last_pc_(NULL), last_data_(0) {}
-  RelocInfoWriter(byte* pos, byte* pc) : pos_(pos), last_pc_(pc),
-                                         last_data_(0) {}
+  RelocInfoWriter() : pos_(NULL),
+                      last_pc_(NULL),
+                      last_id_(0),
+                      last_position_(0) {}
+  RelocInfoWriter(byte* pos, byte* pc) : pos_(pos),
+                                         last_pc_(pc),
+                                         last_id_(0),
+                                         last_position_(0) {}
 
   byte* pos() const { return pos_; }
   byte* last_pc() const { return last_pc_; }
@@ -407,13 +416,15 @@
   inline uint32_t WriteVariableLengthPCJump(uint32_t pc_delta);
   inline void WriteTaggedPC(uint32_t pc_delta, int tag);
   inline void WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag);
+  inline void WriteExtraTaggedIntData(int data_delta, int top_tag);
   inline void WriteExtraTaggedData(intptr_t data_delta, int top_tag);
   inline void WriteTaggedData(intptr_t data_delta, int tag);
   inline void WriteExtraTag(int extra_tag, int top_tag);
 
   byte* pos_;
   byte* last_pc_;
-  intptr_t last_data_;
+  int last_id_;
+  int last_position_;
   DISALLOW_COPY_AND_ASSIGN(RelocInfoWriter);
 };
 
@@ -455,12 +466,13 @@
   int GetTopTag();
   void ReadTaggedPC();
   void AdvanceReadPC();
+  void AdvanceReadId();
+  void AdvanceReadPosition();
   void AdvanceReadData();
   void AdvanceReadVariableLengthPCJump();
-  int GetPositionTypeTag();
-  void ReadTaggedData();
-
-  static RelocInfo::Mode DebugInfoModeFromTag(int tag);
+  int GetLocatableTypeTag();
+  void ReadTaggedId();
+  void ReadTaggedPosition();
 
   // If the given mode is wanted, set it in rinfo_ and return true.
   // Else return false. Used for efficiently skipping unwanted modes.
@@ -473,6 +485,8 @@
   RelocInfo rinfo_;
   bool done_;
   int mode_mask_;
+  int last_id_;
+  int last_position_;
   DISALLOW_COPY_AND_ASSIGN(RelocIterator);
 };
 
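The Mode reordering matters because several masks are built from contiguous enum ranges: with CODE_TARGET and CODE_TARGET_WITH_ID first and LAST_CODE_ENUM moved to DEBUG_BREAK, kCodeTargetMask still covers exactly the code-target modes, and the new kDataMask names the modes that carry a data payload. A minimal standalone sketch, using an abridged enum (so the position modes have different numeric values than in V8) but the mask formulas from the patch:

// Minimal standalone sketch (not part of the patch) of the reordered Mode
// enum and the masks derived from it.
#include <cassert>

enum Mode {
  CODE_TARGET,
  CODE_TARGET_WITH_ID,
  CONSTRUCT_CALL,
  CODE_TARGET_CONTEXT,
  DEBUG_BREAK,
  EMBEDDED_OBJECT,
  COMMENT,
  POSITION,
  STATEMENT_POSITION,
  LAST_CODE_ENUM = DEBUG_BREAK
};

const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1;
const int kPositionMask = 1 << POSITION | 1 << STATEMENT_POSITION;
const int kDataMask =
    (1 << CODE_TARGET_WITH_ID) | kPositionMask | (1 << COMMENT);

int main() {
  // Every mode up to and including DEBUG_BREAK falls inside kCodeTargetMask.
  assert(kCodeTargetMask & (1 << CODE_TARGET));
  assert(kCodeTargetMask & (1 << CODE_TARGET_WITH_ID));
  assert(kCodeTargetMask & (1 << DEBUG_BREAK));
  // Modes after DEBUG_BREAK are outside the mask.
  assert(!(kCodeTargetMask & (1 << EMBEDDED_OBJECT)));
  // CODE_TARGET_WITH_ID now also counts as a mode with associated data.
  assert(kDataMask & (1 << CODE_TARGET_WITH_ID));
  return 0;
}
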
diff --git a/src/ast.cc b/src/ast.cc
index 7ae0f34..303189d 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -413,8 +413,7 @@
 
 
 bool Throw::IsInlineable() const {
-  // TODO(1143): Make functions containing throw inlineable.
-  return false;
+  return true;
 }
 
 
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index a30ffc0..0800714 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -141,7 +141,8 @@
 
 class Genesis BASE_EMBEDDED {
  public:
-  Genesis(Handle<Object> global_object,
+  Genesis(Isolate* isolate,
+          Handle<Object> global_object,
           v8::Handle<v8::ObjectTemplate> global_template,
           v8::ExtensionConfiguration* extensions);
   ~Genesis() { }
@@ -150,8 +151,13 @@
 
   Genesis* previous() { return previous_; }
 
+  Isolate* isolate() const { return isolate_; }
+  Factory* factory() const { return isolate_->factory(); }
+  Heap* heap() const { return isolate_->heap(); }
+
  private:
   Handle<Context> global_context_;
+  Isolate* isolate_;
 
   // There may be more than one active genesis object: When GC is
   // triggered during environment creation there may be weak handle
@@ -163,7 +169,7 @@
   // Creates some basic objects. Used for creating a context from scratch.
   void CreateRoots();
   // Creates the empty function.  Used for creating a context from scratch.
-  Handle<JSFunction> CreateEmptyFunction();
+  Handle<JSFunction> CreateEmptyFunction(Isolate* isolate);
   // Creates the ThrowTypeError function. ECMA 5th Ed. 13.2.3
   Handle<JSFunction> CreateThrowTypeErrorFunction(Builtins::Name builtin);
 
@@ -194,6 +200,7 @@
   // Used for creating a context from scratch.
   void InstallNativeFunctions();
   bool InstallNatives();
+  bool InstallExperimentalNatives();
   void InstallBuiltinFunctionIds();
   void InstallJSFunctionResultCaches();
   void InitializeNormalizedMapCaches();
@@ -239,7 +246,8 @@
       Handle<FixedArray> arguments,
       Handle<FixedArray> caller);
 
-  static bool CompileBuiltin(int index);
+  static bool CompileBuiltin(Isolate* isolate, int index);
+  static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
   static bool CompileNative(Vector<const char> name, Handle<String> source);
   static bool CompileScriptCached(Vector<const char> name,
                                   Handle<String> source,
@@ -269,12 +277,13 @@
 
 
 Handle<Context> Bootstrapper::CreateEnvironment(
+    Isolate* isolate,
     Handle<Object> global_object,
     v8::Handle<v8::ObjectTemplate> global_template,
     v8::ExtensionConfiguration* extensions) {
   HandleScope scope;
   Handle<Context> env;
-  Genesis genesis(global_object, global_template, extensions);
+  Genesis genesis(isolate, global_object, global_template, extensions);
   env = genesis.result();
   if (!env.is_null()) {
     if (InstallExtensions(env, extensions)) {
@@ -287,15 +296,16 @@
 
 static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) {
   // object.__proto__ = proto;
+  Factory* factory = object->GetIsolate()->factory();
   Handle<Map> old_to_map = Handle<Map>(object->map());
-  Handle<Map> new_to_map = FACTORY->CopyMapDropTransitions(old_to_map);
+  Handle<Map> new_to_map = factory->CopyMapDropTransitions(old_to_map);
   new_to_map->set_prototype(*proto);
   object->set_map(*new_to_map);
 }
 
 
 void Bootstrapper::DetachGlobal(Handle<Context> env) {
-  Factory* factory = Isolate::Current()->factory();
+  Factory* factory = env->GetIsolate()->factory();
   JSGlobalProxy::cast(env->global_proxy())->set_context(*factory->null_value());
   SetObjectPrototype(Handle<JSObject>(env->global_proxy()),
                      factory->null_value());
@@ -322,7 +332,7 @@
                                           Handle<JSObject> prototype,
                                           Builtins::Name call,
                                           bool is_ecma_native) {
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = target->GetIsolate();
   Factory* factory = isolate->factory();
   Handle<String> symbol = factory->LookupAsciiSymbol(name);
   Handle<Code> call_code = Handle<Code>(isolate->builtins()->builtin(call));
@@ -344,30 +354,31 @@
 
 Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
     PrototypePropertyMode prototypeMode) {
-  Factory* factory = Isolate::Current()->factory();
   Handle<DescriptorArray> descriptors =
-      factory->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
+      factory()->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE
+                                    ? 4
+                                    : 5);
   PropertyAttributes attributes =
       static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
 
   {  // Add length.
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionLength);
-    CallbacksDescriptor d(*factory->length_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionLength);
+    CallbacksDescriptor d(*factory()->length_symbol(), *proxy, attributes);
     descriptors->Set(0, &d);
   }
   {  // Add name.
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionName);
-    CallbacksDescriptor d(*factory->name_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionName);
+    CallbacksDescriptor d(*factory()->name_symbol(), *proxy, attributes);
     descriptors->Set(1, &d);
   }
   {  // Add arguments.
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionArguments);
-    CallbacksDescriptor d(*factory->arguments_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionArguments);
+    CallbacksDescriptor d(*factory()->arguments_symbol(), *proxy, attributes);
     descriptors->Set(2, &d);
   }
   {  // Add caller.
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionCaller);
-    CallbacksDescriptor d(*factory->caller_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionCaller);
+    CallbacksDescriptor d(*factory()->caller_symbol(), *proxy, attributes);
     descriptors->Set(3, &d);
   }
   if (prototypeMode != DONT_ADD_PROTOTYPE) {
@@ -375,8 +386,8 @@
     if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
       attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
     }
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionPrototype);
-    CallbacksDescriptor d(*factory->prototype_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionPrototype);
+    CallbacksDescriptor d(*factory()->prototype_symbol(), *proxy, attributes);
     descriptors->Set(4, &d);
   }
   descriptors->Sort();
@@ -385,7 +396,7 @@
 
 
 Handle<Map> Genesis::CreateFunctionMap(PrototypePropertyMode prototype_mode) {
-  Handle<Map> map = FACTORY->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
+  Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
   Handle<DescriptorArray> descriptors =
       ComputeFunctionInstanceDescriptor(prototype_mode);
   map->set_instance_descriptors(*descriptors);
@@ -394,7 +405,7 @@
 }
 
 
-Handle<JSFunction> Genesis::CreateEmptyFunction() {
+Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
   // Allocate the map for function instances. Maps are allocated first and their
   // prototypes patched later, once empty function is created.
 
@@ -422,7 +433,6 @@
   function_instance_map_writable_prototype_ =
       CreateFunctionMap(ADD_WRITEABLE_PROTOTYPE);
 
-  Isolate* isolate = Isolate::Current();
   Factory* factory = isolate->factory();
   Heap* heap = isolate->heap();
 
@@ -491,28 +501,31 @@
     PrototypePropertyMode prototypeMode,
     Handle<FixedArray> arguments,
     Handle<FixedArray> caller) {
-  Factory* factory = Isolate::Current()->factory();
   Handle<DescriptorArray> descriptors =
-      factory->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE ? 4 : 5);
+      factory()->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE
+                                    ? 4
+                                    : 5);
   PropertyAttributes attributes = static_cast<PropertyAttributes>(
       DONT_ENUM | DONT_DELETE | READ_ONLY);
 
   {  // length
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionLength);
-    CallbacksDescriptor d(*factory->length_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionLength);
+    CallbacksDescriptor d(*factory()->length_symbol(), *proxy, attributes);
     descriptors->Set(0, &d);
   }
   {  // name
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionName);
-    CallbacksDescriptor d(*factory->name_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionName);
+    CallbacksDescriptor d(*factory()->name_symbol(), *proxy, attributes);
     descriptors->Set(1, &d);
   }
   {  // arguments
-    CallbacksDescriptor d(*factory->arguments_symbol(), *arguments, attributes);
+    CallbacksDescriptor d(*factory()->arguments_symbol(),
+                          *arguments,
+                          attributes);
     descriptors->Set(2, &d);
   }
   {  // caller
-    CallbacksDescriptor d(*factory->caller_symbol(), *caller, attributes);
+    CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attributes);
     descriptors->Set(3, &d);
   }
 
@@ -521,8 +534,8 @@
     if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
       attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
     }
-    Handle<Proxy> proxy = factory->NewProxy(&Accessors::FunctionPrototype);
-    CallbacksDescriptor d(*factory->prototype_symbol(), *proxy, attributes);
+    Handle<Proxy> proxy = factory()->NewProxy(&Accessors::FunctionPrototype);
+    CallbacksDescriptor d(*factory()->prototype_symbol(), *proxy, attributes);
     descriptors->Set(4, &d);
   }
 
@@ -534,14 +547,11 @@
 // ECMAScript 5th Edition, 13.2.3
 Handle<JSFunction> Genesis::CreateThrowTypeErrorFunction(
     Builtins::Name builtin) {
-  Isolate* isolate = Isolate::Current();
-  Factory* factory = isolate->factory();
-
-  Handle<String> name = factory->LookupAsciiSymbol("ThrowTypeError");
+  Handle<String> name = factory()->LookupAsciiSymbol("ThrowTypeError");
   Handle<JSFunction> throw_type_error =
-      factory->NewFunctionWithoutPrototype(name, kStrictMode);
+      factory()->NewFunctionWithoutPrototype(name, kStrictMode);
   Handle<Code> code = Handle<Code>(
-      isolate->builtins()->builtin(builtin));
+      isolate()->builtins()->builtin(builtin));
 
   throw_type_error->set_map(global_context()->strict_mode_function_map());
   throw_type_error->set_code(*code);
@@ -559,7 +569,7 @@
     Handle<JSFunction> empty_function,
     Handle<FixedArray> arguments_callbacks,
     Handle<FixedArray> caller_callbacks) {
-  Handle<Map> map = FACTORY->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
+  Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
   Handle<DescriptorArray> descriptors =
       ComputeStrictFunctionInstanceDescriptor(prototype_mode,
                                               arguments_callbacks,
@@ -574,7 +584,7 @@
 void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
   // Create the callbacks arrays for ThrowTypeError functions.
   // The get/set callbacks are filled in after the maps are created below.
-  Factory* factory = Isolate::Current()->factory();
+  Factory* factory = empty->GetIsolate()->factory();
   Handle<FixedArray> arguments = factory->NewFixedArray(2, TENURED);
   Handle<FixedArray> caller = factory->NewFixedArray(2, TENURED);
 
@@ -623,7 +633,7 @@
 
 static void AddToWeakGlobalContextList(Context* context) {
   ASSERT(context->IsGlobalContext());
-  Heap* heap = Isolate::Current()->heap();
+  Heap* heap = context->GetIsolate()->heap();
 #ifdef DEBUG
   { // NOLINT
     ASSERT(context->get(Context::NEXT_CONTEXT_LINK)->IsUndefined());
@@ -641,15 +651,14 @@
 
 
 void Genesis::CreateRoots() {
-  Isolate* isolate = Isolate::Current();
   // Allocate the global context FixedArray first and then patch the
   // closure and extension object later (we need the empty function
   // and the global object, but in order to create those, we need the
   // global context).
-  global_context_ = Handle<Context>::cast(isolate->global_handles()->Create(
-              *isolate->factory()->NewGlobalContext()));
+  global_context_ = Handle<Context>::cast(isolate()->global_handles()->Create(
+              *factory()->NewGlobalContext()));
   AddToWeakGlobalContextList(*global_context_);
-  isolate->set_context(*global_context());
+  isolate()->set_context(*global_context());
 
   // Allocate the message listeners object.
   {
@@ -692,17 +701,13 @@
     }
   }
 
-  Isolate* isolate = Isolate::Current();
-  Factory* factory = isolate->factory();
-  Heap* heap = isolate->heap();
-
   if (js_global_template.is_null()) {
-    Handle<String> name = Handle<String>(heap->empty_symbol());
-    Handle<Code> code = Handle<Code>(isolate->builtins()->builtin(
+    Handle<String> name = Handle<String>(heap()->empty_symbol());
+    Handle<Code> code = Handle<Code>(isolate()->builtins()->builtin(
         Builtins::kIllegal));
     js_global_function =
-        factory->NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
-                             JSGlobalObject::kSize, code, true);
+        factory()->NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
+                               JSGlobalObject::kSize, code, true);
     // Change the constructor property of the prototype of the
     // hidden global function to refer to the Object function.
     Handle<JSObject> prototype =
@@ -710,20 +715,20 @@
             JSObject::cast(js_global_function->instance_prototype()));
     SetLocalPropertyNoThrow(
         prototype,
-        factory->constructor_symbol(),
-        isolate->object_function(),
+        factory()->constructor_symbol(),
+        isolate()->object_function(),
         NONE);
   } else {
     Handle<FunctionTemplateInfo> js_global_constructor(
         FunctionTemplateInfo::cast(js_global_template->constructor()));
     js_global_function =
-        factory->CreateApiFunction(js_global_constructor,
-                                   factory->InnerGlobalObject);
+        factory()->CreateApiFunction(js_global_constructor,
+                                     factory()->InnerGlobalObject);
   }
 
   js_global_function->initial_map()->set_is_hidden_prototype();
   Handle<GlobalObject> inner_global =
-      factory->NewGlobalObject(js_global_function);
+      factory()->NewGlobalObject(js_global_function);
   if (inner_global_out != NULL) {
     *inner_global_out = inner_global;
   }
@@ -731,23 +736,23 @@
   // Step 2: create or re-initialize the global proxy object.
   Handle<JSFunction> global_proxy_function;
   if (global_template.IsEmpty()) {
-    Handle<String> name = Handle<String>(heap->empty_symbol());
-    Handle<Code> code = Handle<Code>(isolate->builtins()->builtin(
+    Handle<String> name = Handle<String>(heap()->empty_symbol());
+    Handle<Code> code = Handle<Code>(isolate()->builtins()->builtin(
         Builtins::kIllegal));
     global_proxy_function =
-        factory->NewFunction(name, JS_GLOBAL_PROXY_TYPE,
-                             JSGlobalProxy::kSize, code, true);
+        factory()->NewFunction(name, JS_GLOBAL_PROXY_TYPE,
+                               JSGlobalProxy::kSize, code, true);
   } else {
     Handle<ObjectTemplateInfo> data =
         v8::Utils::OpenHandle(*global_template);
     Handle<FunctionTemplateInfo> global_constructor(
             FunctionTemplateInfo::cast(data->constructor()));
     global_proxy_function =
-        factory->CreateApiFunction(global_constructor,
-                                   factory->OuterGlobalObject);
+        factory()->CreateApiFunction(global_constructor,
+                                     factory()->OuterGlobalObject);
   }
 
-  Handle<String> global_name = factory->LookupAsciiSymbol("global");
+  Handle<String> global_name = factory()->LookupAsciiSymbol("global");
   global_proxy_function->shared()->set_instance_class_name(*global_name);
   global_proxy_function->initial_map()->set_is_access_check_needed(true);
 
@@ -761,7 +766,7 @@
         Handle<JSGlobalProxy>::cast(global_object));
   } else {
     return Handle<JSGlobalProxy>::cast(
-        factory->NewJSObject(global_proxy_function, TENURED));
+        factory()->NewJSObject(global_proxy_function, TENURED));
   }
 }
 
@@ -786,7 +791,7 @@
   static const PropertyAttributes attributes =
       static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
   ForceSetProperty(builtins_global,
-                   FACTORY->LookupAsciiSymbol("global"),
+                   factory()->LookupAsciiSymbol("global"),
                    inner_global,
                    attributes);
   // Setup the reference from the global object to the builtins object.
@@ -814,7 +819,7 @@
   // object reinitialization.
   global_context()->set_security_token(*inner_global);
 
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = inner_global->GetIsolate();
   Factory* factory = isolate->factory();
   Heap* heap = isolate->heap();
 
@@ -1164,17 +1169,26 @@
 }
 
 
-bool Genesis::CompileBuiltin(int index) {
+bool Genesis::CompileBuiltin(Isolate* isolate, int index) {
   Vector<const char> name = Natives::GetScriptName(index);
   Handle<String> source_code =
-      Isolate::Current()->bootstrapper()->NativesSourceLookup(index);
+      isolate->bootstrapper()->NativesSourceLookup(index);
+  return CompileNative(name, source_code);
+}
+
+
+bool Genesis::CompileExperimentalBuiltin(Isolate* isolate, int index) {
+  Vector<const char> name = ExperimentalNatives::GetScriptName(index);
+  Factory* factory = isolate->factory();
+  Handle<String> source_code =
+      factory->NewStringFromAscii(ExperimentalNatives::GetScriptSource(index));
   return CompileNative(name, source_code);
 }
 
 
 bool Genesis::CompileNative(Vector<const char> name, Handle<String> source) {
   HandleScope scope;
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = source->GetIsolate();
 #ifdef ENABLE_DEBUGGER_SUPPORT
   isolate->debugger()->set_compiling_natives(true);
 #endif
@@ -1199,7 +1213,7 @@
                                   v8::Extension* extension,
                                   Handle<Context> top_context,
                                   bool use_runtime_context) {
-  Factory* factory = Isolate::Current()->factory();
+  Factory* factory = source->GetIsolate()->factory();
   HandleScope scope;
   Handle<SharedFunctionInfo> function_info;
 
@@ -1246,15 +1260,15 @@
 }
 
 
-#define INSTALL_NATIVE(Type, name, var)                                        \
-  Handle<String> var##_name = factory->LookupAsciiSymbol(name);                \
-  Object* var##_native =                                                       \
-      global_context()->builtins()->GetPropertyNoExceptionThrown(*var##_name); \
+#define INSTALL_NATIVE(Type, name, var)                                       \
+  Handle<String> var##_name = factory()->LookupAsciiSymbol(name);             \
+  Object* var##_native =                                                      \
+      global_context()->builtins()->GetPropertyNoExceptionThrown(             \
+           *var##_name);                                                      \
   global_context()->set_##var(Type::cast(var##_native));
 
 
 void Genesis::InstallNativeFunctions() {
-  Factory* factory = Isolate::Current()->factory();
   HandleScope scope;
   INSTALL_NATIVE(JSFunction, "CreateDate", create_date_fun);
   INSTALL_NATIVE(JSFunction, "ToNumber", to_number_fun);
@@ -1277,25 +1291,23 @@
 
 bool Genesis::InstallNatives() {
   HandleScope scope;
-  Isolate* isolate = Isolate::Current();
-  Factory* factory = isolate->factory();
-  Heap* heap = isolate->heap();
 
   // Create a function for the builtins object. Allocate space for the
   // JavaScript builtins, a reference to the builtins object
   // (itself) and a reference to the global_context directly in the object.
   Handle<Code> code = Handle<Code>(
-      isolate->builtins()->builtin(Builtins::kIllegal));
+      isolate()->builtins()->builtin(Builtins::kIllegal));
   Handle<JSFunction> builtins_fun =
-      factory->NewFunction(factory->empty_symbol(), JS_BUILTINS_OBJECT_TYPE,
-                           JSBuiltinsObject::kSize, code, true);
+      factory()->NewFunction(factory()->empty_symbol(),
+                             JS_BUILTINS_OBJECT_TYPE,
+                             JSBuiltinsObject::kSize, code, true);
 
-  Handle<String> name = factory->LookupAsciiSymbol("builtins");
+  Handle<String> name = factory()->LookupAsciiSymbol("builtins");
   builtins_fun->shared()->set_instance_class_name(*name);
 
   // Allocate the builtins object.
   Handle<JSBuiltinsObject> builtins =
-      Handle<JSBuiltinsObject>::cast(factory->NewGlobalObject(builtins_fun));
+      Handle<JSBuiltinsObject>::cast(factory()->NewGlobalObject(builtins_fun));
   builtins->set_builtins(*builtins);
   builtins->set_global_context(*global_context());
   builtins->set_global_receiver(*builtins);
@@ -1306,7 +1318,7 @@
   // global object.
   static const PropertyAttributes attributes =
       static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
-  Handle<String> global_symbol = factory->LookupAsciiSymbol("global");
+  Handle<String> global_symbol = factory()->LookupAsciiSymbol("global");
   Handle<Object> global_obj(global_context()->global());
   SetLocalPropertyNoThrow(builtins, global_symbol, global_obj, attributes);
 
@@ -1315,12 +1327,13 @@
 
   // Create a bridge function that has context in the global context.
   Handle<JSFunction> bridge =
-      factory->NewFunction(factory->empty_symbol(), factory->undefined_value());
-  ASSERT(bridge->context() == *isolate->global_context());
+      factory()->NewFunction(factory()->empty_symbol(),
+                             factory()->undefined_value());
+  ASSERT(bridge->context() == *isolate()->global_context());
 
   // Allocate the builtins context.
   Handle<Context> context =
-    factory->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
+    factory()->NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
   context->set_global(*builtins);  // override builtins global object
 
   global_context()->set_runtime_context(*context);
@@ -1329,113 +1342,113 @@
     // Builtin functions for Script.
     Handle<JSFunction> script_fun =
         InstallFunction(builtins, "Script", JS_VALUE_TYPE, JSValue::kSize,
-                        isolate->initial_object_prototype(),
+                        isolate()->initial_object_prototype(),
                         Builtins::kIllegal, false);
     Handle<JSObject> prototype =
-        factory->NewJSObject(isolate->object_function(), TENURED);
+        factory()->NewJSObject(isolate()->object_function(), TENURED);
     SetPrototype(script_fun, prototype);
     global_context()->set_script_function(*script_fun);
 
     // Add 'source' and 'data' property to scripts.
     PropertyAttributes common_attributes =
         static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
-    Handle<Proxy> proxy_source = factory->NewProxy(&Accessors::ScriptSource);
+    Handle<Proxy> proxy_source = factory()->NewProxy(&Accessors::ScriptSource);
     Handle<DescriptorArray> script_descriptors =
-        factory->CopyAppendProxyDescriptor(
-            factory->empty_descriptor_array(),
-            factory->LookupAsciiSymbol("source"),
+        factory()->CopyAppendProxyDescriptor(
+            factory()->empty_descriptor_array(),
+            factory()->LookupAsciiSymbol("source"),
             proxy_source,
             common_attributes);
-    Handle<Proxy> proxy_name = factory->NewProxy(&Accessors::ScriptName);
+    Handle<Proxy> proxy_name = factory()->NewProxy(&Accessors::ScriptName);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("name"),
+            factory()->LookupAsciiSymbol("name"),
             proxy_name,
             common_attributes);
-    Handle<Proxy> proxy_id = factory->NewProxy(&Accessors::ScriptId);
+    Handle<Proxy> proxy_id = factory()->NewProxy(&Accessors::ScriptId);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("id"),
+            factory()->LookupAsciiSymbol("id"),
             proxy_id,
             common_attributes);
     Handle<Proxy> proxy_line_offset =
-        factory->NewProxy(&Accessors::ScriptLineOffset);
+        factory()->NewProxy(&Accessors::ScriptLineOffset);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("line_offset"),
+            factory()->LookupAsciiSymbol("line_offset"),
             proxy_line_offset,
             common_attributes);
     Handle<Proxy> proxy_column_offset =
-        factory->NewProxy(&Accessors::ScriptColumnOffset);
+        factory()->NewProxy(&Accessors::ScriptColumnOffset);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("column_offset"),
+            factory()->LookupAsciiSymbol("column_offset"),
             proxy_column_offset,
             common_attributes);
-    Handle<Proxy> proxy_data = factory->NewProxy(&Accessors::ScriptData);
+    Handle<Proxy> proxy_data = factory()->NewProxy(&Accessors::ScriptData);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("data"),
+            factory()->LookupAsciiSymbol("data"),
             proxy_data,
             common_attributes);
-    Handle<Proxy> proxy_type = factory->NewProxy(&Accessors::ScriptType);
+    Handle<Proxy> proxy_type = factory()->NewProxy(&Accessors::ScriptType);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("type"),
+            factory()->LookupAsciiSymbol("type"),
             proxy_type,
             common_attributes);
     Handle<Proxy> proxy_compilation_type =
-        factory->NewProxy(&Accessors::ScriptCompilationType);
+        factory()->NewProxy(&Accessors::ScriptCompilationType);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("compilation_type"),
+            factory()->LookupAsciiSymbol("compilation_type"),
             proxy_compilation_type,
             common_attributes);
     Handle<Proxy> proxy_line_ends =
-        factory->NewProxy(&Accessors::ScriptLineEnds);
+        factory()->NewProxy(&Accessors::ScriptLineEnds);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("line_ends"),
+            factory()->LookupAsciiSymbol("line_ends"),
             proxy_line_ends,
             common_attributes);
     Handle<Proxy> proxy_context_data =
-        factory->NewProxy(&Accessors::ScriptContextData);
+        factory()->NewProxy(&Accessors::ScriptContextData);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("context_data"),
+            factory()->LookupAsciiSymbol("context_data"),
             proxy_context_data,
             common_attributes);
     Handle<Proxy> proxy_eval_from_script =
-        factory->NewProxy(&Accessors::ScriptEvalFromScript);
+        factory()->NewProxy(&Accessors::ScriptEvalFromScript);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("eval_from_script"),
+            factory()->LookupAsciiSymbol("eval_from_script"),
             proxy_eval_from_script,
             common_attributes);
     Handle<Proxy> proxy_eval_from_script_position =
-        factory->NewProxy(&Accessors::ScriptEvalFromScriptPosition);
+        factory()->NewProxy(&Accessors::ScriptEvalFromScriptPosition);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("eval_from_script_position"),
+            factory()->LookupAsciiSymbol("eval_from_script_position"),
             proxy_eval_from_script_position,
             common_attributes);
     Handle<Proxy> proxy_eval_from_function_name =
-        factory->NewProxy(&Accessors::ScriptEvalFromFunctionName);
+        factory()->NewProxy(&Accessors::ScriptEvalFromFunctionName);
     script_descriptors =
-        factory->CopyAppendProxyDescriptor(
+        factory()->CopyAppendProxyDescriptor(
             script_descriptors,
-            factory->LookupAsciiSymbol("eval_from_function_name"),
+            factory()->LookupAsciiSymbol("eval_from_function_name"),
             proxy_eval_from_function_name,
             common_attributes);
 
@@ -1443,9 +1456,9 @@
     script_map->set_instance_descriptors(*script_descriptors);
 
     // Allocate the empty script.
-    Handle<Script> script = factory->NewScript(factory->empty_string());
+    Handle<Script> script = factory()->NewScript(factory()->empty_string());
     script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
-    heap->public_set_empty_script(*script);
+    heap()->public_set_empty_script(*script);
   }
   {
     // Builtin function for OpaqueReference -- a JSValue-based object,
@@ -1454,10 +1467,10 @@
     Handle<JSFunction> opaque_reference_fun =
         InstallFunction(builtins, "OpaqueReference", JS_VALUE_TYPE,
                         JSValue::kSize,
-                        isolate->initial_object_prototype(),
+                        isolate()->initial_object_prototype(),
                         Builtins::kIllegal, false);
     Handle<JSObject> prototype =
-        factory->NewJSObject(isolate->object_function(), TENURED);
+        factory()->NewJSObject(isolate()->object_function(), TENURED);
     SetPrototype(opaque_reference_fun, prototype);
     global_context()->set_opaque_reference_function(*opaque_reference_fun);
   }
@@ -1476,23 +1489,23 @@
                         "InternalArray",
                         JS_ARRAY_TYPE,
                         JSArray::kSize,
-                        isolate->initial_object_prototype(),
+                        isolate()->initial_object_prototype(),
                         Builtins::kArrayCode,
                         true);
     Handle<JSObject> prototype =
-        factory->NewJSObject(isolate->object_function(), TENURED);
+        factory()->NewJSObject(isolate()->object_function(), TENURED);
     SetPrototype(array_function, prototype);
 
     array_function->shared()->set_construct_stub(
-        isolate->builtins()->builtin(Builtins::kArrayConstructCode));
+        isolate()->builtins()->builtin(Builtins::kArrayConstructCode));
     array_function->shared()->DontAdaptArguments();
 
     // Make "length" magic on instances.
     Handle<DescriptorArray> array_descriptors =
-        factory->CopyAppendProxyDescriptor(
-            factory->empty_descriptor_array(),
-            factory->length_symbol(),
-            factory->NewProxy(&Accessors::ArrayLength),
+        factory()->CopyAppendProxyDescriptor(
+            factory()->empty_descriptor_array(),
+            factory()->length_symbol(),
+            factory()->NewProxy(&Accessors::ArrayLength),
             static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
 
     array_function->initial_map()->set_instance_descriptors(
@@ -1508,8 +1521,7 @@
   for (int i = Natives::GetDebuggerCount();
        i < Natives::GetBuiltinsCount();
        i++) {
-    Vector<const char> name = Natives::GetScriptName(i);
-    if (!CompileBuiltin(i)) return false;
+    if (!CompileBuiltin(isolate(), i)) return false;
     // TODO(ager): We really only need to install the JS builtin
     // functions on the builtins object after compiling and running
     // runtime.js.
@@ -1529,9 +1541,9 @@
   InstallBuiltinFunctionIds();
 
   // Install Function.prototype.call and apply.
-  { Handle<String> key = factory->function_class_symbol();
+  { Handle<String> key = factory()->function_class_symbol();
     Handle<JSFunction> function =
-        Handle<JSFunction>::cast(GetProperty(isolate->global(), key));
+        Handle<JSFunction>::cast(GetProperty(isolate()->global(), key));
     Handle<JSObject> proto =
         Handle<JSObject>(JSObject::cast(function->instance_prototype()));
 
@@ -1573,7 +1585,7 @@
 
     // Add initial map.
     Handle<Map> initial_map =
-        factory->NewMap(JS_ARRAY_TYPE, JSRegExpResult::kSize);
+        factory()->NewMap(JS_ARRAY_TYPE, JSRegExpResult::kSize);
     initial_map->set_constructor(*array_constructor);
 
     // Set prototype on map.
@@ -1587,13 +1599,13 @@
     ASSERT_EQ(1, array_descriptors->number_of_descriptors());
 
     Handle<DescriptorArray> reresult_descriptors =
-        factory->NewDescriptorArray(3);
+        factory()->NewDescriptorArray(3);
 
     reresult_descriptors->CopyFrom(0, *array_descriptors, 0);
 
     int enum_index = 0;
     {
-      FieldDescriptor index_field(heap->index_symbol(),
+      FieldDescriptor index_field(heap()->index_symbol(),
                                   JSRegExpResult::kIndexIndex,
                                   NONE,
                                   enum_index++);
@@ -1601,7 +1613,7 @@
     }
 
     {
-      FieldDescriptor input_field(heap->input_symbol(),
+      FieldDescriptor input_field(heap()->input_symbol(),
                                   JSRegExpResult::kInputIndex,
                                   NONE,
                                   enum_index++);
@@ -1626,10 +1638,22 @@
 }
 
 
+bool Genesis::InstallExperimentalNatives() {
+  if (FLAG_harmony_proxies) {
+    for (int i = ExperimentalNatives::GetDebuggerCount();
+         i < ExperimentalNatives::GetBuiltinsCount();
+         i++) {
+      if (!CompileExperimentalBuiltin(isolate(), i)) return false;
+    }
+  }
+  return true;
+}
+
+
 static Handle<JSObject> ResolveBuiltinIdHolder(
     Handle<Context> global_context,
     const char* holder_expr) {
-  Factory* factory = Isolate::Current()->factory();
+  Factory* factory = global_context->GetIsolate()->factory();
   Handle<GlobalObject> global(global_context->global());
   const char* period_pos = strchr(holder_expr, '.');
   if (period_pos == NULL) {
@@ -1648,7 +1672,8 @@
 static void InstallBuiltinFunctionId(Handle<JSObject> holder,
                                      const char* function_name,
                                      BuiltinFunctionId id) {
-  Handle<String> name = FACTORY->LookupAsciiSymbol(function_name);
+  Factory* factory = holder->GetIsolate()->factory();
+  Handle<String> name = factory->LookupAsciiSymbol(function_name);
   Object* function_object = holder->GetProperty(*name)->ToObjectUnchecked();
   Handle<JSFunction> function(JSFunction::cast(function_object));
   function->shared()->set_function_data(Smi::FromInt(id));
@@ -1675,13 +1700,14 @@
   F(16, global_context()->regexp_function())
 
 
-static FixedArray* CreateCache(int size, JSFunction* factory) {
+static FixedArray* CreateCache(int size, JSFunction* factory_function) {
+  Factory* factory = factory_function->GetIsolate()->factory();
   // Caches are supposed to live for a long time, allocate in old space.
   int array_size = JSFunctionResultCache::kEntriesIndex + 2 * size;
   // Cannot use cast as object is not fully initialized yet.
   JSFunctionResultCache* cache = reinterpret_cast<JSFunctionResultCache*>(
-      *FACTORY->NewFixedArrayWithHoles(array_size, TENURED));
-  cache->set(JSFunctionResultCache::kFactoryIndex, factory);
+      *factory->NewFixedArrayWithHoles(array_size, TENURED));
+  cache->set(JSFunctionResultCache::kFactoryIndex, factory_function);
   cache->MakeZeroSize();
   return cache;
 }
@@ -1720,7 +1746,7 @@
 
 bool Bootstrapper::InstallExtensions(Handle<Context> global_context,
                                      v8::ExtensionConfiguration* extensions) {
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = global_context->GetIsolate();
   BootstrapperActive active;
   SaveContext saved_context(isolate);
   isolate->set_context(*global_context);
@@ -1731,7 +1757,7 @@
 
 
 void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
-  Factory* factory = Isolate::Current()->factory();
+  Factory* factory = global_context->GetIsolate()->factory();
   HandleScope scope;
   Handle<JSGlobalObject> js_global(
       JSGlobalObject::cast(global_context->global()));
@@ -1867,9 +1893,10 @@
 
 bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
   HandleScope scope;
+  Factory* factory = builtins->GetIsolate()->factory();
   for (int i = 0; i < Builtins::NumberOfJavaScriptBuiltins(); i++) {
     Builtins::JavaScript id = static_cast<Builtins::JavaScript>(i);
-    Handle<String> name = FACTORY->LookupAsciiSymbol(Builtins::GetName(id));
+    Handle<String> name = factory->LookupAsciiSymbol(Builtins::GetName(id));
     Object* function_object = builtins->GetPropertyNoExceptionThrown(*name);
     Handle<JSFunction> function
         = Handle<JSFunction>(JSFunction::cast(function_object));
@@ -1918,13 +1945,12 @@
   ASSERT(object->IsInstanceOf(
       FunctionTemplateInfo::cast(object_template->constructor())));
 
-  Isolate* isolate = Isolate::Current();
   bool pending_exception = false;
   Handle<JSObject> obj =
       Execution::InstantiateObject(object_template, &pending_exception);
   if (pending_exception) {
-    ASSERT(isolate->has_pending_exception());
-    isolate->clear_pending_exception();
+    ASSERT(isolate()->has_pending_exception());
+    isolate()->clear_pending_exception();
     return false;
   }
   TransferObject(obj, object);
@@ -2023,6 +2049,7 @@
 
 void Genesis::TransferObject(Handle<JSObject> from, Handle<JSObject> to) {
   HandleScope outer;
+  Factory* factory = from->GetIsolate()->factory();
 
   ASSERT(!from->IsJSArray());
   ASSERT(!to->IsJSArray());
@@ -2032,7 +2059,7 @@
 
   // Transfer the prototype (new map is needed).
   Handle<Map> old_to_map = Handle<Map>(to->map());
-  Handle<Map> new_to_map = FACTORY->CopyMapDropTransitions(old_to_map);
+  Handle<Map> new_to_map = factory->CopyMapDropTransitions(old_to_map);
   new_to_map->set_prototype(from->map()->prototype());
   to->set_map(*new_to_map);
 }
@@ -2053,10 +2080,10 @@
 }
 
 
-Genesis::Genesis(Handle<Object> global_object,
+Genesis::Genesis(Isolate* isolate,
+                 Handle<Object> global_object,
                  v8::Handle<v8::ObjectTemplate> global_template,
-                 v8::ExtensionConfiguration* extensions) {
-  Isolate* isolate = Isolate::Current();
+                 v8::ExtensionConfiguration* extensions) : isolate_(isolate) {
   result_ = Handle<Context>::null();
   // If V8 isn't running and cannot be initialized, just return.
   if (!V8::IsRunning() && !V8::Initialize(NULL)) return;
@@ -2086,7 +2113,7 @@
   } else {
     // We get here if there was no context snapshot.
     CreateRoots();
-    Handle<JSFunction> empty_function = CreateEmptyFunction();
+    Handle<JSFunction> empty_function = CreateEmptyFunction(isolate);
     CreateStrictModeFunctionMaps(empty_function);
     Handle<GlobalObject> inner_global;
     Handle<JSGlobalProxy> global_proxy =
@@ -2103,6 +2130,9 @@
     isolate->counters()->contexts_created_from_scratch()->Increment();
   }
 
+  // Install experimental natives.
+  if (!InstallExperimentalNatives()) return;
+
   result_ = global_context_;
 }
 
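The bootstrapper change above wires in a new step, Genesis::InstallExperimentalNatives, which compiles the experimental library (proxy.js, per the SConscript change) into each new context, but only when --harmony_proxies is set (the flag is defined near the end of this patch in flag-definitions.h). proxy.js itself is not part of this diff, so the snippet below is only an assumed sketch of exercising the flag from d8, on the assumption that proxy.js follows the Harmony proxies draft and installs Proxy.create:

// Assumed usage sketch only: Proxy.create(handler) and its
// get(receiver, name) trap follow the Harmony proxies draft; this diff
// does not show proxy.js, so treat the API below as hypothetical.
//   $ d8 --harmony_proxies
var handler = { get: function(receiver, name) { return 'trapped ' + name; } };
var p = Proxy.create(handler);
print(p.anything);   // would print "trapped anything" under the draft API
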
diff --git a/src/bootstrapper.h b/src/bootstrapper.h
index 3e158d6..018ceef 100644
--- a/src/bootstrapper.h
+++ b/src/bootstrapper.h
@@ -93,6 +93,7 @@
   // Creates a JavaScript Global Context with initial object graph.
   // The returned value is a global handle casted to V8Environment*.
   Handle<Context> CreateEnvironment(
+      Isolate* isolate,
       Handle<Object> global_object,
       v8::Handle<v8::ObjectTemplate> global_template,
       v8::ExtensionConfiguration* extensions);
diff --git a/src/d8.gyp b/src/d8.gyp
index 901fd65..29212dd 100644
--- a/src/d8.gyp
+++ b/src/d8.gyp
@@ -61,6 +61,7 @@
       'variables': {
         'js_files': [
           'd8.js',
+          'macros.py',
         ],
       },
       'actions': [
@@ -72,7 +73,6 @@
           ],
           'outputs': [
             '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc',
-            '<(SHARED_INTERMEDIATE_DIR)/d8-js-empty.cc',
           ],
           'action': [
             'python',
diff --git a/src/data-flow.cc b/src/data-flow.cc
index 79339ed..6a3b05c 100644
--- a/src/data-flow.cc
+++ b/src/data-flow.cc
@@ -63,477 +63,4 @@
   current_value_ = val >> 1;
 }
 
-
-bool AssignedVariablesAnalyzer::Analyze(CompilationInfo* info) {
-  Scope* scope = info->scope();
-  int size = scope->num_parameters() + scope->num_stack_slots();
-  if (size == 0) return true;
-  AssignedVariablesAnalyzer analyzer(info, size);
-  return analyzer.Analyze();
-}
-
-
-AssignedVariablesAnalyzer::AssignedVariablesAnalyzer(CompilationInfo* info,
-                                                     int size)
-    : info_(info), av_(size) {
-}
-
-
-bool AssignedVariablesAnalyzer::Analyze() {
-  ASSERT(av_.length() > 0);
-  VisitStatements(info_->function()->body());
-  return !HasStackOverflow();
-}
-
-
-Variable* AssignedVariablesAnalyzer::FindSmiLoopVariable(ForStatement* stmt) {
-  // The loop must have all necessary parts.
-  if (stmt->init() == NULL || stmt->cond() == NULL || stmt->next() == NULL) {
-    return NULL;
-  }
-  // The initialization statement has to be a simple assignment.
-  Assignment* init = stmt->init()->StatementAsSimpleAssignment();
-  if (init == NULL) return NULL;
-
-  // We only deal with local variables.
-  Variable* loop_var = init->target()->AsVariableProxy()->AsVariable();
-  if (loop_var == NULL || !loop_var->IsStackAllocated()) return NULL;
-
-  // Don't try to get clever with const or dynamic variables.
-  if (loop_var->mode() != Variable::VAR) return NULL;
-
-  // The initial value has to be a smi.
-  Literal* init_lit = init->value()->AsLiteral();
-  if (init_lit == NULL || !init_lit->handle()->IsSmi()) return NULL;
-  int init_value = Smi::cast(*init_lit->handle())->value();
-
-  // The condition must be a compare of variable with <, <=, >, or >=.
-  CompareOperation* cond = stmt->cond()->AsCompareOperation();
-  if (cond == NULL) return NULL;
-  if (cond->op() != Token::LT
-      && cond->op() != Token::LTE
-      && cond->op() != Token::GT
-      && cond->op() != Token::GTE) return NULL;
-
-  // The lhs must be the same variable as in the init expression.
-  if (cond->left()->AsVariableProxy()->AsVariable() != loop_var) return NULL;
-
-  // The rhs must be a smi.
-  Literal* term_lit = cond->right()->AsLiteral();
-  if (term_lit == NULL || !term_lit->handle()->IsSmi()) return NULL;
-  int term_value = Smi::cast(*term_lit->handle())->value();
-
-  // The count operation updates the same variable as in the init expression.
-  CountOperation* update = stmt->next()->StatementAsCountOperation();
-  if (update == NULL) return NULL;
-  if (update->expression()->AsVariableProxy()->AsVariable() != loop_var) {
-    return NULL;
-  }
-
-  // The direction of the count operation must agree with the start and the end
-  // value. We currently do not allow the initial value to be the same as the
-  // terminal value. This _would_ be ok as long as the loop body never executes
-  // or executes exactly one time.
-  if (init_value == term_value) return NULL;
-  if (init_value < term_value && update->op() != Token::INC) return NULL;
-  if (init_value > term_value && update->op() != Token::DEC) return NULL;
-
-  // Check that the update operation cannot overflow the smi range. This can
-  // occur in the two cases where the loop bound is equal to the largest or
-  // smallest smi.
-  if (update->op() == Token::INC && term_value == Smi::kMaxValue) return NULL;
-  if (update->op() == Token::DEC && term_value == Smi::kMinValue) return NULL;
-
-  // Found a smi loop variable.
-  return loop_var;
-}
-
-int AssignedVariablesAnalyzer::BitIndex(Variable* var) {
-  ASSERT(var != NULL);
-  ASSERT(var->IsStackAllocated());
-  Slot* slot = var->AsSlot();
-  if (slot->type() == Slot::PARAMETER) {
-    return slot->index();
-  } else {
-    return info_->scope()->num_parameters() + slot->index();
-  }
-}
-
-
-void AssignedVariablesAnalyzer::RecordAssignedVar(Variable* var) {
-  ASSERT(var != NULL);
-  if (var->IsStackAllocated()) {
-    av_.Add(BitIndex(var));
-  }
-}
-
-
-void AssignedVariablesAnalyzer::MarkIfTrivial(Expression* expr) {
-  Variable* var = expr->AsVariableProxy()->AsVariable();
-  if (var != NULL &&
-      var->IsStackAllocated() &&
-      !var->is_arguments() &&
-      var->mode() != Variable::CONST &&
-      (var->is_this() || !av_.Contains(BitIndex(var)))) {
-    expr->AsVariableProxy()->MarkAsTrivial();
-  }
-}
-
-
-void AssignedVariablesAnalyzer::ProcessExpression(Expression* expr) {
-  BitVector saved_av(av_);
-  av_.Clear();
-  Visit(expr);
-  av_.Union(saved_av);
-}
-
-void AssignedVariablesAnalyzer::VisitBlock(Block* stmt) {
-  VisitStatements(stmt->statements());
-}
-
-
-void AssignedVariablesAnalyzer::VisitExpressionStatement(
-    ExpressionStatement* stmt) {
-  ProcessExpression(stmt->expression());
-}
-
-
-void AssignedVariablesAnalyzer::VisitEmptyStatement(EmptyStatement* stmt) {
-  // Do nothing.
-}
-
-
-void AssignedVariablesAnalyzer::VisitIfStatement(IfStatement* stmt) {
-  ProcessExpression(stmt->condition());
-  Visit(stmt->then_statement());
-  Visit(stmt->else_statement());
-}
-
-
-void AssignedVariablesAnalyzer::VisitContinueStatement(
-    ContinueStatement* stmt) {
-  // Nothing to do.
-}
-
-
-void AssignedVariablesAnalyzer::VisitBreakStatement(BreakStatement* stmt) {
-  // Nothing to do.
-}
-
-
-void AssignedVariablesAnalyzer::VisitReturnStatement(ReturnStatement* stmt) {
-  ProcessExpression(stmt->expression());
-}
-
-
-void AssignedVariablesAnalyzer::VisitWithEnterStatement(
-    WithEnterStatement* stmt) {
-  ProcessExpression(stmt->expression());
-}
-
-
-void AssignedVariablesAnalyzer::VisitWithExitStatement(
-    WithExitStatement* stmt) {
-  // Nothing to do.
-}
-
-
-void AssignedVariablesAnalyzer::VisitSwitchStatement(SwitchStatement* stmt) {
-  BitVector result(av_);
-  av_.Clear();
-  Visit(stmt->tag());
-  result.Union(av_);
-  for (int i = 0; i < stmt->cases()->length(); i++) {
-    CaseClause* clause = stmt->cases()->at(i);
-    if (!clause->is_default()) {
-      av_.Clear();
-      Visit(clause->label());
-      result.Union(av_);
-    }
-    VisitStatements(clause->statements());
-  }
-  av_.Union(result);
-}
-
-
-void AssignedVariablesAnalyzer::VisitDoWhileStatement(DoWhileStatement* stmt) {
-  ProcessExpression(stmt->cond());
-  Visit(stmt->body());
-}
-
-
-void AssignedVariablesAnalyzer::VisitWhileStatement(WhileStatement* stmt) {
-  ProcessExpression(stmt->cond());
-  Visit(stmt->body());
-}
-
-
-void AssignedVariablesAnalyzer::VisitForStatement(ForStatement* stmt) {
-  if (stmt->init() != NULL) Visit(stmt->init());
-  if (stmt->cond() != NULL) ProcessExpression(stmt->cond());
-  if (stmt->next() != NULL) Visit(stmt->next());
-
-  // Process loop body. After visiting the loop body av_ contains
-  // the assigned variables of the loop body.
-  BitVector saved_av(av_);
-  av_.Clear();
-  Visit(stmt->body());
-
-  Variable* var = FindSmiLoopVariable(stmt);
-  if (var != NULL && !av_.Contains(BitIndex(var))) {
-    stmt->set_loop_variable(var);
-  }
-  av_.Union(saved_av);
-}
-
-
-void AssignedVariablesAnalyzer::VisitForInStatement(ForInStatement* stmt) {
-  ProcessExpression(stmt->each());
-  ProcessExpression(stmt->enumerable());
-  Visit(stmt->body());
-}
-
-
-void AssignedVariablesAnalyzer::VisitTryCatchStatement(
-    TryCatchStatement* stmt) {
-  Visit(stmt->try_block());
-  Visit(stmt->catch_block());
-}
-
-
-void AssignedVariablesAnalyzer::VisitTryFinallyStatement(
-    TryFinallyStatement* stmt) {
-  Visit(stmt->try_block());
-  Visit(stmt->finally_block());
-}
-
-
-void AssignedVariablesAnalyzer::VisitDebuggerStatement(
-    DebuggerStatement* stmt) {
-  // Nothing to do.
-}
-
-
-void AssignedVariablesAnalyzer::VisitFunctionLiteral(FunctionLiteral* expr) {
-  // Nothing to do.
-  ASSERT(av_.IsEmpty());
-}
-
-
-void AssignedVariablesAnalyzer::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  // Nothing to do.
-  ASSERT(av_.IsEmpty());
-}
-
-
-void AssignedVariablesAnalyzer::VisitConditional(Conditional* expr) {
-  ASSERT(av_.IsEmpty());
-
-  Visit(expr->condition());
-
-  BitVector result(av_);
-  av_.Clear();
-  Visit(expr->then_expression());
-  result.Union(av_);
-
-  av_.Clear();
-  Visit(expr->else_expression());
-  av_.Union(result);
-}
-
-
-void AssignedVariablesAnalyzer::VisitVariableProxy(VariableProxy* expr) {
-  // Nothing to do.
-  ASSERT(av_.IsEmpty());
-}
-
-
-void AssignedVariablesAnalyzer::VisitLiteral(Literal* expr) {
-  // Nothing to do.
-  ASSERT(av_.IsEmpty());
-}
-
-
-void AssignedVariablesAnalyzer::VisitRegExpLiteral(RegExpLiteral* expr) {
-  // Nothing to do.
-  ASSERT(av_.IsEmpty());
-}
-
-
-void AssignedVariablesAnalyzer::VisitObjectLiteral(ObjectLiteral* expr) {
-  ASSERT(av_.IsEmpty());
-  BitVector result(av_.length());
-  for (int i = 0; i < expr->properties()->length(); i++) {
-    Visit(expr->properties()->at(i)->value());
-    result.Union(av_);
-    av_.Clear();
-  }
-  av_ = result;
-}
-
-
-void AssignedVariablesAnalyzer::VisitArrayLiteral(ArrayLiteral* expr) {
-  ASSERT(av_.IsEmpty());
-  BitVector result(av_.length());
-  for (int i = 0; i < expr->values()->length(); i++) {
-    Visit(expr->values()->at(i));
-    result.Union(av_);
-    av_.Clear();
-  }
-  av_ = result;
-}
-
-
-void AssignedVariablesAnalyzer::VisitCatchExtensionObject(
-    CatchExtensionObject* expr) {
-  ASSERT(av_.IsEmpty());
-  Visit(expr->key());
-  ProcessExpression(expr->value());
-}
-
-
-void AssignedVariablesAnalyzer::VisitAssignment(Assignment* expr) {
-  ASSERT(av_.IsEmpty());
-
-  // There are three kinds of assignments: variable assignments, property
-  // assignments, and reference errors (invalid left-hand sides).
-  Variable* var = expr->target()->AsVariableProxy()->AsVariable();
-  Property* prop = expr->target()->AsProperty();
-  ASSERT(var == NULL || prop == NULL);
-
-  if (var != NULL) {
-    MarkIfTrivial(expr->value());
-    Visit(expr->value());
-    if (expr->is_compound()) {
-      // Left-hand side occurs also as an rvalue.
-      MarkIfTrivial(expr->target());
-      ProcessExpression(expr->target());
-    }
-    RecordAssignedVar(var);
-
-  } else if (prop != NULL) {
-    MarkIfTrivial(expr->value());
-    Visit(expr->value());
-    if (!prop->key()->IsPropertyName()) {
-      MarkIfTrivial(prop->key());
-      ProcessExpression(prop->key());
-    }
-    MarkIfTrivial(prop->obj());
-    ProcessExpression(prop->obj());
-
-  } else {
-    Visit(expr->target());
-  }
-}
-
-
-void AssignedVariablesAnalyzer::VisitThrow(Throw* expr) {
-  ASSERT(av_.IsEmpty());
-  Visit(expr->exception());
-}
-
-
-void AssignedVariablesAnalyzer::VisitProperty(Property* expr) {
-  ASSERT(av_.IsEmpty());
-  if (!expr->key()->IsPropertyName()) {
-    MarkIfTrivial(expr->key());
-    Visit(expr->key());
-  }
-  MarkIfTrivial(expr->obj());
-  ProcessExpression(expr->obj());
-}
-
-
-void AssignedVariablesAnalyzer::VisitCall(Call* expr) {
-  ASSERT(av_.IsEmpty());
-  Visit(expr->expression());
-  BitVector result(av_);
-  for (int i = 0; i < expr->arguments()->length(); i++) {
-    av_.Clear();
-    Visit(expr->arguments()->at(i));
-    result.Union(av_);
-  }
-  av_ = result;
-}
-
-
-void AssignedVariablesAnalyzer::VisitCallNew(CallNew* expr) {
-  ASSERT(av_.IsEmpty());
-  Visit(expr->expression());
-  BitVector result(av_);
-  for (int i = 0; i < expr->arguments()->length(); i++) {
-    av_.Clear();
-    Visit(expr->arguments()->at(i));
-    result.Union(av_);
-  }
-  av_ = result;
-}
-
-
-void AssignedVariablesAnalyzer::VisitCallRuntime(CallRuntime* expr) {
-  ASSERT(av_.IsEmpty());
-  BitVector result(av_);
-  for (int i = 0; i < expr->arguments()->length(); i++) {
-    av_.Clear();
-    Visit(expr->arguments()->at(i));
-    result.Union(av_);
-  }
-  av_ = result;
-}
-
-
-void AssignedVariablesAnalyzer::VisitUnaryOperation(UnaryOperation* expr) {
-  ASSERT(av_.IsEmpty());
-  MarkIfTrivial(expr->expression());
-  Visit(expr->expression());
-}
-
-
-void AssignedVariablesAnalyzer::VisitCountOperation(CountOperation* expr) {
-  ASSERT(av_.IsEmpty());
-  if (expr->is_prefix()) MarkIfTrivial(expr->expression());
-  Visit(expr->expression());
-
-  Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
-  if (var != NULL) RecordAssignedVar(var);
-}
-
-
-void AssignedVariablesAnalyzer::VisitBinaryOperation(BinaryOperation* expr) {
-  ASSERT(av_.IsEmpty());
-  MarkIfTrivial(expr->right());
-  Visit(expr->right());
-  MarkIfTrivial(expr->left());
-  ProcessExpression(expr->left());
-}
-
-
-void AssignedVariablesAnalyzer::VisitCompareOperation(CompareOperation* expr) {
-  ASSERT(av_.IsEmpty());
-  MarkIfTrivial(expr->right());
-  Visit(expr->right());
-  MarkIfTrivial(expr->left());
-  ProcessExpression(expr->left());
-}
-
-
-void AssignedVariablesAnalyzer::VisitCompareToNull(CompareToNull* expr) {
-  ASSERT(av_.IsEmpty());
-  MarkIfTrivial(expr->expression());
-  Visit(expr->expression());
-}
-
-
-void AssignedVariablesAnalyzer::VisitThisFunction(ThisFunction* expr) {
-  // Nothing to do.
-  ASSERT(av_.IsEmpty());
-}
-
-
-void AssignedVariablesAnalyzer::VisitDeclaration(Declaration* decl) {
-  UNREACHABLE();
-}
-
-
 } }  // namespace v8::internal
diff --git a/src/data-flow.h b/src/data-flow.h
index 573d7d8..76cff88 100644
--- a/src/data-flow.h
+++ b/src/data-flow.h
@@ -335,44 +335,6 @@
   List<T*> queue_;
 };
 
-
-// Computes the set of assigned variables and annotates variables proxies
-// that are trivial sub-expressions and for-loops where the loop variable
-// is guaranteed to be a smi.
-class AssignedVariablesAnalyzer : public AstVisitor {
- public:
-  static bool Analyze(CompilationInfo* info);
-
- private:
-  AssignedVariablesAnalyzer(CompilationInfo* info, int bits);
-  bool Analyze();
-
-  Variable* FindSmiLoopVariable(ForStatement* stmt);
-
-  int BitIndex(Variable* var);
-
-  void RecordAssignedVar(Variable* var);
-
-  void MarkIfTrivial(Expression* expr);
-
-  // Visits an expression saving the accumulator before, clearing
-  // it before visting and restoring it after visiting.
-  void ProcessExpression(Expression* expr);
-
-  // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
-  AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
-  CompilationInfo* info_;
-
-  // Accumulator for assigned variables set.
-  BitVector av_;
-
-  DISALLOW_COPY_AND_ASSIGN(AssignedVariablesAnalyzer);
-};
-
-
 } }  // namespace v8::internal
 
 
diff --git a/src/debug.cc b/src/debug.cc
index 093f38e..3691333 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -477,21 +477,6 @@
     // calling convention used by the call site.
     Handle<Code> dbgbrk_code(Debug::FindDebugBreak(code, mode));
     rinfo()->set_target_address(dbgbrk_code->entry());
-
-    // For stubs that refer back to an inlined version clear the cached map for
-    // the inlined case to always go through the IC. As long as the break point
-    // is set the patching performed by the runtime system will take place in
-    // the code copy and will therefore have no effect on the running code
-    // keeping it from using the inlined code.
-    if (code->is_keyed_load_stub()) {
-      KeyedLoadIC::ClearInlinedVersion(pc());
-    } else if (code->is_keyed_store_stub()) {
-      KeyedStoreIC::ClearInlinedVersion(pc());
-    } else if (code->is_load_stub()) {
-      LoadIC::ClearInlinedVersion(pc());
-    } else if (code->is_store_stub()) {
-      StoreIC::ClearInlinedVersion(pc());
-    }
   }
 }
 
@@ -499,20 +484,6 @@
 void BreakLocationIterator::ClearDebugBreakAtIC() {
   // Patch the code to the original invoke.
   rinfo()->set_target_address(original_rinfo()->target_address());
-
-  RelocInfo::Mode mode = rmode();
-  if (RelocInfo::IsCodeTarget(mode)) {
-    AssertNoAllocation nogc;
-    Address target = original_rinfo()->target_address();
-    Code* code = Code::GetCodeFromTargetAddress(target);
-
-    // Restore the inlined version of keyed stores to get back to the
-    // fast case.  We need to patch back the keyed store because no
-    // patching happens when running normally.  For keyed loads, the
-    // map check will get patched back when running normally after ICs
-    // have been cleared at GC.
-    if (code->is_keyed_store_stub()) KeyedStoreIC::RestoreInlinedVersion(pc());
-  }
 }
 
 
@@ -843,6 +814,7 @@
   HandleScope scope(isolate_);
   Handle<Context> context =
       isolate_->bootstrapper()->CreateEnvironment(
+          isolate_,
           Handle<Object>::null(),
           v8::Handle<ObjectTemplate>(),
           NULL);
diff --git a/src/disassembler.cc b/src/disassembler.cc
index 65e1668..368c3a8 100644
--- a/src/disassembler.cc
+++ b/src/disassembler.cc
@@ -282,6 +282,9 @@
         } else {
           out.AddFormatted(" %s", Code::Kind2String(kind));
         }
+        if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
+          out.AddFormatted(" (id = %d)", static_cast<int>(relocinfo.data()));
+        }
       } else if (rmode == RelocInfo::RUNTIME_ENTRY &&
                  Isolate::Current()->deoptimizer_data() != NULL) {
         // A runtime entry reloinfo might be a deoptimization bailout.
diff --git a/src/extensions/experimental/collator.cc b/src/extensions/experimental/collator.cc
new file mode 100644
index 0000000..7d1a21d
--- /dev/null
+++ b/src/extensions/experimental/collator.cc
@@ -0,0 +1,218 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "collator.h"
+
+#include "unicode/coll.h"
+#include "unicode/locid.h"
+#include "unicode/ucol.h"
+
+namespace v8 {
+namespace internal {
+
+v8::Persistent<v8::FunctionTemplate> Collator::collator_template_;
+
+icu::Collator* Collator::UnpackCollator(v8::Handle<v8::Object> obj) {
+  if (collator_template_->HasInstance(obj)) {
+    return static_cast<icu::Collator*>(obj->GetPointerFromInternalField(0));
+  }
+
+  return NULL;
+}
+
+void Collator::DeleteCollator(v8::Persistent<v8::Value> object, void* param) {
+  v8::Persistent<v8::Object> persistent_object =
+      v8::Persistent<v8::Object>::Cast(object);
+
+  // First delete the hidden C++ object.
+  // Unpacking should never return NULL here. That would only happen if
+  // this method is used as the weak callback for persistent handles not
+  // pointing to a collator.
+  delete UnpackCollator(persistent_object);
+
+  // Then dispose of the persistent handle to JS object.
+  persistent_object.Dispose();
+}
+
+// Throws a JavaScript exception.
+static v8::Handle<v8::Value> ThrowUnexpectedObjectError() {
+  // Returns undefined, and schedules an exception to be thrown.
+  return v8::ThrowException(v8::Exception::Error(
+      v8::String::New("Collator method called on an object "
+                      "that is not a Collator.")));
+}
+
+// Extracts the boolean option named |option| and stores its value in
+// |result|. Returns true if the option was specified, false otherwise.
+static bool ExtractBooleanOption(const v8::Local<v8::Object>& options,
+                                 const char* option,
+                                 bool* result) {
+  v8::HandleScope handle_scope;
+  v8::TryCatch try_catch;
+  v8::Handle<v8::Value> value = options->Get(v8::String::New(option));
+  if (try_catch.HasCaught()) {
+    return false;
+  }
+  // No need to check if |value| is empty because it's taken care of
+  // by TryCatch above.
+  if (!value->IsUndefined() && !value->IsNull()) {
+    if (value->IsBoolean()) {
+      *result = value->BooleanValue();
+      return true;
+    }
+  }
+  return false;
+}
+
+// When there's an ICU error, throw a JavaScript error with |message|.
+static v8::Handle<v8::Value> ThrowExceptionForICUError(const char* message) {
+  return v8::ThrowException(v8::Exception::Error(v8::String::New(message)));
+}
+
+v8::Handle<v8::Value> Collator::CollatorCompare(const v8::Arguments& args) {
+  if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
+    return v8::ThrowException(v8::Exception::SyntaxError(
+        v8::String::New("Two string arguments are required.")));
+  }
+
+  icu::Collator* collator = UnpackCollator(args.Holder());
+  if (!collator) {
+    return ThrowUnexpectedObjectError();
+  }
+
+  v8::String::Value string_value1(args[0]);
+  v8::String::Value string_value2(args[1]);
+  const UChar* string1 = reinterpret_cast<const UChar*>(*string_value1);
+  const UChar* string2 = reinterpret_cast<const UChar*>(*string_value2);
+  UErrorCode status = U_ZERO_ERROR;
+  UCollationResult result = collator->compare(
+      string1, string_value1.length(), string2, string_value2.length(), status);
+
+  if (U_FAILURE(status)) {
+    return ThrowExceptionForICUError(
+        "Unexpected failure in Collator.compare.");
+  }
+
+  return v8::Int32::New(result);
+}
+
+v8::Handle<v8::Value> Collator::JSCollator(const v8::Arguments& args) {
+  v8::HandleScope handle_scope;
+
+  if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsObject()) {
+    return v8::ThrowException(v8::Exception::SyntaxError(
+        v8::String::New("Locale and collation options are required.")));
+  }
+
+  v8::String::AsciiValue locale(args[0]);
+  icu::Locale icu_locale(*locale);
+
+  icu::Collator* collator = NULL;
+  UErrorCode status = U_ZERO_ERROR;
+  collator = icu::Collator::createInstance(icu_locale, status);
+
+  if (U_FAILURE(status)) {
+    delete collator;
+    return ThrowExceptionForICUError("Failed to create collator.");
+  }
+
+  v8::Local<v8::Object> options(args[1]->ToObject());
+
+  // Below, we only change collation options that the caller specified
+  // explicitly in JavaScript; otherwise we leave them untouched so that
+  // the locale-dependent default values are preserved.
+  // The three options below are very likely to have the same default
+  // across locales, but not all locales have been checked. Options we may
+  // add in the future certainly have locale-dependent defaults (e.g.
+  // caseFirst is upperFirst for Danish but off for most other locales).
+
+  bool ignore_case, ignore_accents, numeric;
+
+  if (ExtractBooleanOption(options, "ignoreCase", &ignore_case)) {
+    collator->setAttribute(UCOL_CASE_LEVEL, ignore_case ? UCOL_OFF : UCOL_ON,
+                           status);
+    if (U_FAILURE(status)) {
+      delete collator;
+      return ThrowExceptionForICUError("Failed to set ignoreCase.");
+    }
+  }
+
+  // Accents are taken into account with strength secondary or higher.
+  if (ExtractBooleanOption(options, "ignoreAccents", &ignore_accents)) {
+    if (!ignore_accents) {
+      collator->setStrength(icu::Collator::SECONDARY);
+    } else {
+      collator->setStrength(icu::Collator::PRIMARY);
+    }
+  }
+
+  if (ExtractBooleanOption(options, "numeric", &numeric)) {
+    collator->setAttribute(UCOL_NUMERIC_COLLATION,
+                           numeric ? UCOL_ON : UCOL_OFF, status);
+    if (U_FAILURE(status)) {
+      delete collator;
+      return ThrowExceptionForICUError("Failed to set numeric sort option.");
+    }
+  }
+
+  if (collator_template_.IsEmpty()) {
+    v8::Local<v8::FunctionTemplate> raw_template(v8::FunctionTemplate::New());
+    raw_template->SetClassName(v8::String::New("v8Locale.Collator"));
+
+    // Define internal field count on instance template.
+    v8::Local<v8::ObjectTemplate> object_template =
+        raw_template->InstanceTemplate();
+
+    // Set aside internal fields for icu collator.
+    object_template->SetInternalFieldCount(1);
+
+    // Define all of the prototype methods on prototype template.
+    v8::Local<v8::ObjectTemplate> proto = raw_template->PrototypeTemplate();
+    proto->Set(v8::String::New("compare"),
+               v8::FunctionTemplate::New(CollatorCompare));
+
+    collator_template_ =
+        v8::Persistent<v8::FunctionTemplate>::New(raw_template);
+  }
+
+  // Create an empty object wrapper.
+  v8::Local<v8::Object> local_object =
+      collator_template_->GetFunction()->NewInstance();
+  v8::Persistent<v8::Object> wrapper =
+      v8::Persistent<v8::Object>::New(local_object);
+
+  // Set collator as internal field of the resulting JS object.
+  wrapper->SetPointerInInternalField(0, collator);
+
+  // Make the object handle weak so we can delete the collator once GC kicks in.
+  wrapper.MakeWeak(NULL, DeleteCollator);
+
+  return wrapper;
+}
+
+} }  // namespace v8::internal
+
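The options handling in JSCollator above only touches ICU settings for keys that ExtractBooleanOption finds as actual booleans; anything else is left at the locale default. From the JavaScript side, the recognized options object is simply the following (a sketch; the key names and mappings come directly from the code above):

// Options object as read by ExtractBooleanOption in JSCollator; a key
// that is missing, undefined, null, or not a boolean leaves the
// corresponding ICU attribute at its locale-dependent default.
var options = {
  ignoreCase: true,      // UCOL_CASE_LEVEL is OFF when true, ON when false
  ignoreAccents: false,  // strength SECONDARY when false, PRIMARY when true
  numeric: true          // UCOL_NUMERIC_COLLATION ON when true, OFF when false
};
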
diff --git a/src/extensions/experimental/collator.h b/src/extensions/experimental/collator.h
new file mode 100644
index 0000000..10d6ffb
--- /dev/null
+++ b/src/extensions/experimental/collator.h
@@ -0,0 +1,69 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H_
+#define V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H_
+
+#include <v8.h>
+
+#include "unicode/uversion.h"
+
+namespace U_ICU_NAMESPACE {
+class Collator;
+class UnicodeString;
+}
+
+namespace v8 {
+namespace internal {
+
+class Collator {
+ public:
+  static v8::Handle<v8::Value> JSCollator(const v8::Arguments& args);
+
+  // Helper methods for various bindings.
+
+  // Unpacks collator object from corresponding JavaScript object.
+  static icu::Collator* UnpackCollator(v8::Handle<v8::Object> obj);
+
+  // Release memory we allocated for the Collator once the JS object that
+  // holds the pointer gets garbage collected.
+  static void DeleteCollator(v8::Persistent<v8::Value> object, void* param);
+
+  // Compares two strings and returns -1, 0 or 1 depending on whether
+  // string1 is less than, equal to, or greater than string2.
+  static v8::Handle<v8::Value> CollatorCompare(const v8::Arguments& args);
+
+ private:
+  Collator() {}
+
+  static v8::Persistent<v8::FunctionTemplate> collator_template_;
+};
+
+} }  // namespace v8::internal
+
+#endif  // V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H_
+
diff --git a/src/extensions/experimental/experimental.gyp b/src/extensions/experimental/experimental.gyp
index a8585fd..d1194ce 100644
--- a/src/extensions/experimental/experimental.gyp
+++ b/src/extensions/experimental/experimental.gyp
@@ -39,9 +39,13 @@
       'sources': [
         'break-iterator.cc',
         'break-iterator.h',
+        'collator.cc',
+        'collator.h',
         'i18n-extension.cc',
         'i18n-extension.h',
-	'<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
+        'i18n-locale.cc',
+        'i18n-locale.h',
+        '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
       ],
       'include_dirs': [
         '<(icu_src_dir)/public/common',
@@ -49,7 +53,7 @@
       ],
       'dependencies': [
         '<(icu_src_dir)/icu.gyp:*',
-	'js2c_i18n#host',
+        'js2c_i18n#host',
         '../../../tools/gyp/v8.gyp:v8',
       ],
     },
@@ -59,28 +63,27 @@
       'toolsets': ['host'],
       'variables': {
         'library_files': [
-	  'i18n.js'
-	],
+          'i18n.js'
+        ],
       },
       'actions': [
         {
-	  'action_name': 'js2c_i18n',
-	  'inputs': [
-	    '../../../tools/js2c.py',
-	    '<@(library_files)',
-	  ],
-	  'outputs': [
-	    '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
-	    '<(SHARED_INTERMEDIATE_DIR)/i18n-js-empty.cc'
-	  ],
-	  'action': [
-	    'python',
-	    '../../../tools/js2c.py',
-	    '<@(_outputs)',
-	    'I18N',
-	    '<@(library_files)'
-	  ],
-	},
+          'action_name': 'js2c_i18n',
+          'inputs': [
+            '../../../tools/js2c.py',
+            '<@(library_files)',
+          ],
+          'outputs': [
+            '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
+          ],
+          'action': [
+            'python',
+            '../../../tools/js2c.py',
+            '<@(_outputs)',
+            'I18N',
+            '<@(library_files)'
+          ],
+        },
       ],
     },
   ],  # targets
diff --git a/src/extensions/experimental/i18n-extension.cc b/src/extensions/experimental/i18n-extension.cc
index 6e3ab15..56bea23 100644
--- a/src/extensions/experimental/i18n-extension.cc
+++ b/src/extensions/experimental/i18n-extension.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -27,13 +27,10 @@
 
 #include "i18n-extension.h"
 
-#include <algorithm>
-#include <string>
-
 #include "break-iterator.h"
+#include "collator.h"
+#include "i18n-locale.h"
 #include "natives.h"
-#include "unicode/locid.h"
-#include "unicode/uloc.h"
 
 namespace v8 {
 namespace internal {
@@ -57,166 +54,30 @@
 v8::Handle<v8::FunctionTemplate> I18NExtension::GetNativeFunction(
     v8::Handle<v8::String> name) {
   if (name->Equals(v8::String::New("NativeJSLocale"))) {
-    return v8::FunctionTemplate::New(JSLocale);
+    return v8::FunctionTemplate::New(I18NLocale::JSLocale);
   } else if (name->Equals(v8::String::New("NativeJSAvailableLocales"))) {
-    return v8::FunctionTemplate::New(JSAvailableLocales);
+    return v8::FunctionTemplate::New(I18NLocale::JSAvailableLocales);
   } else if (name->Equals(v8::String::New("NativeJSMaximizedLocale"))) {
-    return v8::FunctionTemplate::New(JSMaximizedLocale);
+    return v8::FunctionTemplate::New(I18NLocale::JSMaximizedLocale);
   } else if (name->Equals(v8::String::New("NativeJSMinimizedLocale"))) {
-    return v8::FunctionTemplate::New(JSMinimizedLocale);
+    return v8::FunctionTemplate::New(I18NLocale::JSMinimizedLocale);
   } else if (name->Equals(v8::String::New("NativeJSDisplayLanguage"))) {
-    return v8::FunctionTemplate::New(JSDisplayLanguage);
+    return v8::FunctionTemplate::New(I18NLocale::JSDisplayLanguage);
   } else if (name->Equals(v8::String::New("NativeJSDisplayScript"))) {
-    return v8::FunctionTemplate::New(JSDisplayScript);
+    return v8::FunctionTemplate::New(I18NLocale::JSDisplayScript);
   } else if (name->Equals(v8::String::New("NativeJSDisplayRegion"))) {
-    return v8::FunctionTemplate::New(JSDisplayRegion);
+    return v8::FunctionTemplate::New(I18NLocale::JSDisplayRegion);
   } else if (name->Equals(v8::String::New("NativeJSDisplayName"))) {
-    return v8::FunctionTemplate::New(JSDisplayName);
+    return v8::FunctionTemplate::New(I18NLocale::JSDisplayName);
   } else if (name->Equals(v8::String::New("NativeJSBreakIterator"))) {
     return v8::FunctionTemplate::New(BreakIterator::JSBreakIterator);
+  } else if (name->Equals(v8::String::New("NativeJSCollator"))) {
+    return v8::FunctionTemplate::New(Collator::JSCollator);
   }
 
   return v8::Handle<v8::FunctionTemplate>();
 }
 
-v8::Handle<v8::Value> I18NExtension::JSLocale(const v8::Arguments& args) {
-  // TODO(cira): Fetch browser locale. Accept en-US as good default for now.
-  // We could possibly pass browser locale as a parameter in the constructor.
-  std::string locale_name("en-US");
-  if (args.Length() == 1 && args[0]->IsString()) {
-    locale_name = *v8::String::Utf8Value(args[0]->ToString());
-  }
-
-  v8::Local<v8::Object> locale = v8::Object::New();
-  locale->Set(v8::String::New("locale"), v8::String::New(locale_name.c_str()));
-
-  icu::Locale icu_locale(locale_name.c_str());
-
-  const char* language = icu_locale.getLanguage();
-  locale->Set(v8::String::New("language"), v8::String::New(language));
-
-  const char* script = icu_locale.getScript();
-  if (strlen(script)) {
-    locale->Set(v8::String::New("script"), v8::String::New(script));
-  }
-
-  const char* region = icu_locale.getCountry();
-  if (strlen(region)) {
-    locale->Set(v8::String::New("region"), v8::String::New(region));
-  }
-
-  return locale;
-}
-
-// TODO(cira): Filter out locales that Chrome doesn't support.
-v8::Handle<v8::Value> I18NExtension::JSAvailableLocales(
-    const v8::Arguments& args) {
-  v8::Local<v8::Array> all_locales = v8::Array::New();
-
-  int count = 0;
-  const icu::Locale* icu_locales = icu::Locale::getAvailableLocales(count);
-  for (int i = 0; i < count; ++i) {
-    all_locales->Set(i, v8::String::New(icu_locales[i].getName()));
-  }
-
-  return all_locales;
-}
-
-// Use - as tag separator, not _ that ICU uses.
-static std::string NormalizeLocale(const std::string& locale) {
-  std::string result(locale);
-  // TODO(cira): remove STL dependency.
-  std::replace(result.begin(), result.end(), '_', '-');
-  return result;
-}
-
-v8::Handle<v8::Value> I18NExtension::JSMaximizedLocale(
-    const v8::Arguments& args) {
-  if (!args.Length() || !args[0]->IsString()) {
-    return v8::Undefined();
-  }
-
-  UErrorCode status = U_ZERO_ERROR;
-  std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
-  char max_locale[ULOC_FULLNAME_CAPACITY];
-  uloc_addLikelySubtags(locale_name.c_str(), max_locale,
-                        sizeof(max_locale), &status);
-  if (U_FAILURE(status)) {
-    return v8::Undefined();
-  }
-
-  return v8::String::New(NormalizeLocale(max_locale).c_str());
-}
-
-v8::Handle<v8::Value> I18NExtension::JSMinimizedLocale(
-    const v8::Arguments& args) {
-  if (!args.Length() || !args[0]->IsString()) {
-    return v8::Undefined();
-  }
-
-  UErrorCode status = U_ZERO_ERROR;
-  std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
-  char min_locale[ULOC_FULLNAME_CAPACITY];
-  uloc_minimizeSubtags(locale_name.c_str(), min_locale,
-                       sizeof(min_locale), &status);
-  if (U_FAILURE(status)) {
-    return v8::Undefined();
-  }
-
-  return v8::String::New(NormalizeLocale(min_locale).c_str());
-}
-
-// Common code for JSDisplayXXX methods.
-static v8::Handle<v8::Value> GetDisplayItem(const v8::Arguments& args,
-                                            const std::string& item) {
-  if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
-    return v8::Undefined();
-  }
-
-  std::string base_locale = *v8::String::Utf8Value(args[0]->ToString());
-  icu::Locale icu_locale(base_locale.c_str());
-  icu::Locale display_locale =
-      icu::Locale(*v8::String::Utf8Value(args[1]->ToString()));
-  icu::UnicodeString result;
-  if (item == "language") {
-    icu_locale.getDisplayLanguage(display_locale, result);
-  } else if (item == "script") {
-    icu_locale.getDisplayScript(display_locale, result);
-  } else if (item == "region") {
-    icu_locale.getDisplayCountry(display_locale, result);
-  } else if (item == "name") {
-    icu_locale.getDisplayName(display_locale, result);
-  } else {
-    return v8::Undefined();
-  }
-
-  if (result.length()) {
-    return v8::String::New(
-        reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length());
-  }
-
-  return v8::Undefined();
-}
-
-v8::Handle<v8::Value> I18NExtension::JSDisplayLanguage(
-    const v8::Arguments& args) {
-  return GetDisplayItem(args, "language");
-}
-
-v8::Handle<v8::Value> I18NExtension::JSDisplayScript(
-    const v8::Arguments& args) {
-  return GetDisplayItem(args, "script");
-}
-
-v8::Handle<v8::Value> I18NExtension::JSDisplayRegion(
-    const v8::Arguments& args) {
-  return GetDisplayItem(args, "region");
-}
-
-v8::Handle<v8::Value> I18NExtension::JSDisplayName(const v8::Arguments& args) {
-  return GetDisplayItem(args, "name");
-}
-
 I18NExtension* I18NExtension::get() {
   if (!extension_) {
     extension_ = new I18NExtension();
diff --git a/src/extensions/experimental/i18n-extension.h b/src/extensions/experimental/i18n-extension.h
index 54c973f..b4dc7c3 100644
--- a/src/extensions/experimental/i18n-extension.h
+++ b/src/extensions/experimental/i18n-extension.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -41,16 +41,6 @@
   virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
       v8::Handle<v8::String> name);
 
-  // Implementations of window.Locale methods.
-  static v8::Handle<v8::Value> JSLocale(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSAvailableLocales(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSMaximizedLocale(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSMinimizedLocale(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSDisplayLanguage(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSDisplayScript(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSDisplayRegion(const v8::Arguments& args);
-  static v8::Handle<v8::Value> JSDisplayName(const v8::Arguments& args);
-
   // V8 code prefers Register, while Chrome and WebKit use get kind of methods.
   static void Register();
   static I18NExtension* get();
diff --git a/src/extensions/experimental/i18n-locale.cc b/src/extensions/experimental/i18n-locale.cc
new file mode 100644
index 0000000..e5e1cf8
--- /dev/null
+++ b/src/extensions/experimental/i18n-locale.cc
@@ -0,0 +1,172 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "i18n-locale.h"
+
+#include <algorithm>
+#include <string>
+
+#include "unicode/locid.h"
+#include "unicode/uloc.h"
+
+namespace v8 {
+namespace internal {
+
+v8::Handle<v8::Value> I18NLocale::JSLocale(const v8::Arguments& args) {
+  // TODO(cira): Fetch browser locale. Accept en-US as good default for now.
+  // We could possibly pass browser locale as a parameter in the constructor.
+  std::string locale_name("en-US");
+  if (args.Length() == 1 && args[0]->IsString()) {
+    locale_name = *v8::String::Utf8Value(args[0]->ToString());
+  }
+
+  v8::Local<v8::Object> locale = v8::Object::New();
+  locale->Set(v8::String::New("locale"), v8::String::New(locale_name.c_str()));
+
+  icu::Locale icu_locale(locale_name.c_str());
+
+  const char* language = icu_locale.getLanguage();
+  locale->Set(v8::String::New("language"), v8::String::New(language));
+
+  const char* script = icu_locale.getScript();
+  if (strlen(script)) {
+    locale->Set(v8::String::New("script"), v8::String::New(script));
+  }
+
+  const char* region = icu_locale.getCountry();
+  if (strlen(region)) {
+    locale->Set(v8::String::New("region"), v8::String::New(region));
+  }
+
+  return locale;
+}
+
+// TODO(cira): Filter out locales that Chrome doesn't support.
+v8::Handle<v8::Value> I18NLocale::JSAvailableLocales(
+    const v8::Arguments& args) {
+  v8::Local<v8::Array> all_locales = v8::Array::New();
+
+  int count = 0;
+  const icu::Locale* icu_locales = icu::Locale::getAvailableLocales(count);
+  for (int i = 0; i < count; ++i) {
+    all_locales->Set(i, v8::String::New(icu_locales[i].getName()));
+  }
+
+  return all_locales;
+}
+
+// Use - as tag separator, not _ that ICU uses.
+static std::string NormalizeLocale(const std::string& locale) {
+  std::string result(locale);
+  // TODO(cira): remove STL dependency.
+  std::replace(result.begin(), result.end(), '_', '-');
+  return result;
+}
+
+v8::Handle<v8::Value> I18NLocale::JSMaximizedLocale(const v8::Arguments& args) {
+  if (!args.Length() || !args[0]->IsString()) {
+    return v8::Undefined();
+  }
+
+  UErrorCode status = U_ZERO_ERROR;
+  std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
+  char max_locale[ULOC_FULLNAME_CAPACITY];
+  uloc_addLikelySubtags(locale_name.c_str(), max_locale,
+                        sizeof(max_locale), &status);
+  if (U_FAILURE(status)) {
+    return v8::Undefined();
+  }
+
+  return v8::String::New(NormalizeLocale(max_locale).c_str());
+}
+
+v8::Handle<v8::Value> I18NLocale::JSMinimizedLocale(const v8::Arguments& args) {
+  if (!args.Length() || !args[0]->IsString()) {
+    return v8::Undefined();
+  }
+
+  UErrorCode status = U_ZERO_ERROR;
+  std::string locale_name = *v8::String::Utf8Value(args[0]->ToString());
+  char min_locale[ULOC_FULLNAME_CAPACITY];
+  uloc_minimizeSubtags(locale_name.c_str(), min_locale,
+                       sizeof(min_locale), &status);
+  if (U_FAILURE(status)) {
+    return v8::Undefined();
+  }
+
+  return v8::String::New(NormalizeLocale(min_locale).c_str());
+}
+
+// Common code for JSDisplayXXX methods.
+static v8::Handle<v8::Value> GetDisplayItem(const v8::Arguments& args,
+                                            const std::string& item) {
+  if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
+    return v8::Undefined();
+  }
+
+  std::string base_locale = *v8::String::Utf8Value(args[0]->ToString());
+  icu::Locale icu_locale(base_locale.c_str());
+  icu::Locale display_locale =
+      icu::Locale(*v8::String::Utf8Value(args[1]->ToString()));
+  icu::UnicodeString result;
+  if (item == "language") {
+    icu_locale.getDisplayLanguage(display_locale, result);
+  } else if (item == "script") {
+    icu_locale.getDisplayScript(display_locale, result);
+  } else if (item == "region") {
+    icu_locale.getDisplayCountry(display_locale, result);
+  } else if (item == "name") {
+    icu_locale.getDisplayName(display_locale, result);
+  } else {
+    return v8::Undefined();
+  }
+
+  if (result.length()) {
+    return v8::String::New(
+        reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length());
+  }
+
+  return v8::Undefined();
+}
+
+v8::Handle<v8::Value> I18NLocale::JSDisplayLanguage(const v8::Arguments& args) {
+  return GetDisplayItem(args, "language");
+}
+
+v8::Handle<v8::Value> I18NLocale::JSDisplayScript(const v8::Arguments& args) {
+  return GetDisplayItem(args, "script");
+}
+
+v8::Handle<v8::Value> I18NLocale::JSDisplayRegion(const v8::Arguments& args) {
+  return GetDisplayItem(args, "region");
+}
+
+v8::Handle<v8::Value> I18NLocale::JSDisplayName(const v8::Arguments& args) {
+  return GetDisplayItem(args, "name");
+}
+
+} }  // namespace v8::internal
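The locale methods moved from I18NExtension into the new I18NLocale class are unchanged in behaviour. They are reachable only from extension scripts (such as i18n.js) that declare the corresponding native functions; roughly, and depending on the ICU data in the build, they behave as sketched below:

// Sketch of the semantics, as invoked from an extension script like
// i18n.js ("native function" declarations are only valid there); exact
// strings depend on the ICU data compiled into the build.
native function NativeJSMaximizedLocale();
native function NativeJSMinimizedLocale();
native function NativeJSDisplayLanguage();
NativeJSMaximizedLocale('en');          // typically "en-Latn-US"
NativeJSMinimizedLocale('en-Latn-US');  // typically "en"
NativeJSDisplayLanguage('sr', 'en');    // typically "Serbian"
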
diff --git a/src/extensions/experimental/i18n-locale.h b/src/extensions/experimental/i18n-locale.h
new file mode 100644
index 0000000..aa9adbe
--- /dev/null
+++ b/src/extensions/experimental/i18n-locale.h
@@ -0,0 +1,53 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
+#define V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
+
+#include <v8.h>
+
+namespace v8 {
+namespace internal {
+
+class I18NLocale {
+ public:
+  I18NLocale() {}
+
+  // Implementations of window.Locale methods.
+  static v8::Handle<v8::Value> JSLocale(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSAvailableLocales(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSMaximizedLocale(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSMinimizedLocale(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSDisplayLanguage(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSDisplayScript(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSDisplayRegion(const v8::Arguments& args);
+  static v8::Handle<v8::Value> JSDisplayName(const v8::Arguments& args);
+};
+
+} }  // namespace v8::internal
+
+#endif  // V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
diff --git a/src/extensions/experimental/i18n.js b/src/extensions/experimental/i18n.js
index baf3859..5a74905 100644
--- a/src/extensions/experimental/i18n.js
+++ b/src/extensions/experimental/i18n.js
@@ -101,3 +101,16 @@
 v8Locale.prototype.v8CreateBreakIterator = function(type) {
   return new v8Locale.v8BreakIterator(this.locale, type);
 };
+
+// TODO(jungshik): Set |collator.options| to actually recognized / resolved
+// values.
+v8Locale.Collator = function(locale, options) {
+  native function NativeJSCollator();
+  var collator = NativeJSCollator(locale,
+      options === undefined ? {} : options);
+  return collator;
+};
+
+v8Locale.prototype.createCollator = function(options) {
+  return new v8Locale.Collator(this.locale, options);
+};
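
A short usage sketch for the new entry points, grounded only in what this hunk shows: v8Locale.Collator forwards the locale and an options object (defaulting to {}) to NativeJSCollator, and createCollator builds one from an existing v8Locale's own locale. Everything on the returned collator object comes from the native side and is not visible here.

// Assumes the experimental i18n extension is loaded, so v8Locale exists.
var collator = new v8Locale.Collator('de', {});  // options default to {} when omitted
// From an existing v8Locale instance (its constructor is defined earlier in
// i18n.js, outside this hunk): myLocale.createCollator();
// Whatever methods the returned collator exposes (e.g. compare) are supplied
// by NativeJSCollator, which this patch does not show.
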
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 15f8def..69139bb 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -96,6 +96,9 @@
 //
 #define FLAG FLAG_FULL
 
+// Flags for experimental language features.
+DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
+
 // Flags for Crankshaft.
 #ifdef V8_TARGET_ARCH_MIPS
   DEFINE_bool(crankshaft, false, "use crankshaft")
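
The new flag defaults to off; it is the switch for the experimental proxy support whose proxy.js library is added elsewhere in this patch. A hedged sketch of how it would be exercised from the d8 shell, assuming the early Harmony-era Proxy.create API (the API surface itself is not part of this hunk):

// Run as: d8 --harmony_proxies example.js
// Proxy.create and its handler shape are assumptions about what proxy.js
// will provide; they are not shown in this diff.
var handler = {
  get: function(receiver, name) { return 'hello, ' + name; }
};
var p = Proxy.create(handler);
print(p.world);  // 'hello, world'
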
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index d6ba56e..1c2f7bf 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -744,7 +744,7 @@
       if (ShouldInlineSmiCase(op)) {
         EmitInlineSmiBinaryOp(expr, op, mode, left, right);
       } else {
-        EmitBinaryOp(op, mode);
+        EmitBinaryOp(expr, op, mode);
       }
       break;
     }
diff --git a/src/full-codegen.h b/src/full-codegen.h
index d6ed1b9..a373809 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -445,12 +445,13 @@
 
   // Apply the compound assignment operator. Expects the left operand on top
   // of the stack and the right one in the accumulator.
-  void EmitBinaryOp(Token::Value op,
+  void EmitBinaryOp(BinaryOperation* expr,
+                    Token::Value op,
                     OverwriteMode mode);
 
   // Helper functions for generating inlined smi code for certain
   // binary operations.
-  void EmitInlineSmiBinaryOp(Expression* expr,
+  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                              Token::Value op,
                              OverwriteMode mode,
                              Expression* left,
@@ -512,12 +513,16 @@
   static Register context_register();
 
   // Helper for calling an IC stub.
-  void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
+  void EmitCallIC(Handle<Code> ic,
+                  RelocInfo::Mode mode,
+                  unsigned ast_id);
 
   // Calling an IC stub with a patch site. Passing NULL for patch_site,
   // or a non-NULL patch_site that is not activated, indicates no inlined
   // smi code and emits a nop after the IC call.
-  void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
+  void EmitCallIC(Handle<Code> ic,
+                  JumpPatchSite* patch_site,
+                  unsigned ast_id);
 
   // Set fields in the stack frame. Offsets are the frame pointer relative
   // offsets defined in, e.g., StandardFrameConstants.
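
To make the split between the two emitters concrete: for a compound assignment, full-codegen can emit an inlined smi fast path guarded by a runtime smi check (EmitInlineSmiBinaryOp), with the generic binary-op IC (EmitBinaryOp) as the fallback; the IC path now also receives the BinaryOperation so its AST id can be recorded at the call site. A minimal JavaScript-level sketch of code that exercises both paths (function name illustrative):

// Smi operands stay on the inlined fast path; a non-smi operand falls
// through to the generic binary-op IC emitted by EmitBinaryOp.
function bump(counter) {
  counter += 1;
  return counter;
}
bump(41);    // smi operands: inlined smi code
bump(0.5);   // heap-number operand: generic IC fallback
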
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 032ca76..3b01f57 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -1017,10 +1017,9 @@
 
 HConstant::HConstant(Handle<Object> handle, Representation r)
     : handle_(handle),
-      constant_type_(HType::TypeFromValue(handle)),
       has_int32_value_(false),
-      int32_value_(0),
       has_double_value_(false),
+      int32_value_(0),
       double_value_(0)  {
   set_representation(r);
   SetFlag(kUseGVN);
@@ -1435,7 +1434,7 @@
 
 
 HType HConstant::CalculateInferredType() {
-  return constant_type_;
+  return HType::TypeFromValue(handle_);
 }
 
 
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index a623775..e32a09c 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -114,6 +114,7 @@
   V(HasCachedArrayIndex)                       \
   V(InstanceOf)                                \
   V(InstanceOfKnownGlobal)                     \
+  V(InvokeFunction)                            \
   V(IsNull)                                    \
   V(IsObject)                                  \
   V(IsSmi)                                     \
@@ -155,6 +156,7 @@
   V(StoreKeyedGeneric)                         \
   V(StoreNamedField)                           \
   V(StoreNamedGeneric)                         \
+  V(StringAdd)                                 \
   V(StringCharCodeAt)                          \
   V(StringCharFromCode)                        \
   V(StringLength)                              \
@@ -1243,6 +1245,23 @@
 };
 
 
+class HInvokeFunction: public HBinaryCall {
+ public:
+  HInvokeFunction(HValue* context, HValue* function, int argument_count)
+      : HBinaryCall(context, function, argument_count) {
+  }
+
+  virtual Representation RequiredInputRepresentation(int index) const {
+    return Representation::Tagged();
+  }
+
+  HValue* context() { return first(); }
+  HValue* function() { return second(); }
+
+  DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke_function")
+};
+
+
 class HCallConstantFunction: public HCall<0> {
  public:
   HCallConstantFunction(Handle<JSFunction> function, int argument_count)
@@ -1707,6 +1726,16 @@
   virtual void Verify();
 #endif
 
+  virtual HValue* Canonicalize() {
+    if (!value()->type().IsUninitialized() &&
+        value()->type().IsString() &&
+        first() == FIRST_STRING_TYPE &&
+        last() == LAST_STRING_TYPE) {
+      return NULL;
+    }
+    return this;
+  }
+
   static HCheckInstanceType* NewIsJSObjectOrJSFunction(HValue* value);
 
   InstanceType first() const { return first_; }
@@ -1748,6 +1777,18 @@
   virtual void Verify();
 #endif
 
+  virtual HValue* Canonicalize() {
+    HType value_type = value()->type();
+    if (!value_type.IsUninitialized() &&
+        (value_type.IsHeapNumber() ||
+         value_type.IsString() ||
+         value_type.IsBoolean() ||
+         value_type.IsNonPrimitive())) {
+      return NULL;
+    }
+    return this;
+  }
+
   DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check_non_smi")
 
  protected:
@@ -1990,14 +2031,13 @@
 
  private:
   Handle<Object> handle_;
-  HType constant_type_;
 
   // The following two values represent the int32 and the double value of the
   // given constant if there is a lossless conversion between the constant
   // and the specific representation.
-  bool has_int32_value_;
+  bool has_int32_value_ : 1;
+  bool has_double_value_ : 1;
   int32_t int32_value_;
-  bool has_double_value_;
   double double_value_;
 };
 
@@ -3408,6 +3448,29 @@
 };
 
 
+class HStringAdd: public HBinaryOperation {
+ public:
+  HStringAdd(HValue* left, HValue* right) : HBinaryOperation(left, right) {
+    set_representation(Representation::Tagged());
+    SetFlag(kUseGVN);
+    SetFlag(kDependsOnMaps);
+  }
+
+  virtual Representation RequiredInputRepresentation(int index) const {
+    return Representation::Tagged();
+  }
+
+  virtual HType CalculateInferredType() {
+    return HType::String();
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string_add")
+
+ protected:
+  virtual bool DataEquals(HValue* other) { return true; }
+};
+
+
 class HStringCharCodeAt: public HBinaryOperation {
  public:
   HStringCharCodeAt(HValue* string, HValue* index)
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 61496aa..f6c47f3 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -242,7 +242,7 @@
 
 
 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
-  if (!predecessors_.is_empty()) {
+  if (HasPredecessor()) {
     // Only loop header blocks can have a predecessor added after
     // instructions have been added to the block (they have phis for all
     // values in the environment, these phis may be eliminated later).
@@ -1808,7 +1808,7 @@
     ZoneList<Representation>* to_convert_reps) {
   Representation r = current->representation();
   if (r.IsNone()) return;
-  if (current->uses()->length() == 0) return;
+  if (current->uses()->is_empty()) return;
 
   // Collect the representation changes in a sorted list.  This allows
   // us to avoid duplicate changes without searching the list.
@@ -2076,37 +2076,17 @@
 
 
 // HGraphBuilder infrastructure for bailing out and checking bailouts.
-#define BAILOUT(reason)                         \
+#define CHECK_BAILOUT(call)                     \
   do {                                          \
-    Bailout(reason);                            \
-    return;                                     \
-  } while (false)
-
-
-#define CHECK_BAILOUT                           \
-  do {                                          \
+    call;                                       \
     if (HasStackOverflow()) return;             \
   } while (false)
 
 
-#define VISIT_FOR_EFFECT(expr)                  \
-  do {                                          \
-    VisitForEffect(expr);                       \
-    if (HasStackOverflow()) return;             \
-  } while (false)
-
-
-#define VISIT_FOR_VALUE(expr)                   \
-  do {                                          \
-    VisitForValue(expr);                        \
-    if (HasStackOverflow()) return;             \
-  } while (false)
-
-
-#define VISIT_FOR_CONTROL(expr, true_block, false_block)        \
+#define CHECK_ALIVE(call)                                       \
   do {                                                          \
-    VisitForControl(expr, true_block, false_block);             \
-    if (HasStackOverflow()) return;                             \
+    call;                                                       \
+    if (HasStackOverflow() || current_block() == NULL) return;  \
   } while (false)
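
The macro cleanup above replaces BAILOUT and the VISIT_FOR_* family with two checks: CHECK_BAILOUT(call) returns only when the visitor has flagged a stack overflow, while CHECK_ALIVE(call) also returns when current_block() is NULL, i.e. when the code just visited closed off the current control flow. A minimal JavaScript sketch of the second case (function name illustrative):

function f(a) {
  if (a) {
    return 1;
    a = 2;  // visiting 'return 1' leaves current_block() == NULL, so a
            // CHECK_ALIVE-wrapped visit returns before reaching this store
  }
  return 0;
}
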
 
 
@@ -2148,22 +2128,21 @@
 
 
 void HGraphBuilder::VisitArgument(Expression* expr) {
-  VISIT_FOR_VALUE(expr);
+  CHECK_ALIVE(VisitForValue(expr));
   Push(AddInstruction(new(zone()) HPushArgument(Pop())));
 }
 
 
 void HGraphBuilder::VisitArgumentList(ZoneList<Expression*>* arguments) {
   for (int i = 0; i < arguments->length(); i++) {
-    VisitArgument(arguments->at(i));
-    if (HasStackOverflow() || current_block() == NULL) return;
+    CHECK_ALIVE(VisitArgument(arguments->at(i)));
   }
 }
 
 
 void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
   for (int i = 0; i < exprs->length(); ++i) {
-    VISIT_FOR_VALUE(exprs->at(i));
+    CHECK_ALIVE(VisitForValue(exprs->at(i)));
   }
 }
 
@@ -2294,7 +2273,7 @@
 
 void HGraphBuilder::SetupScope(Scope* scope) {
   // We don't yet handle the function name for named function expressions.
-  if (scope->function() != NULL) BAILOUT("named function expression");
+  if (scope->function() != NULL) return Bailout("named function expression");
 
   HConstant* undefined_constant = new(zone()) HConstant(
       isolate()->factory()->undefined_value(), Representation::Tagged());
@@ -2320,7 +2299,7 @@
     if (!scope->arguments()->IsStackAllocated() ||
         (scope->arguments_shadow() != NULL &&
         !scope->arguments_shadow()->IsStackAllocated())) {
-      BAILOUT("context-allocated arguments");
+      return Bailout("context-allocated arguments");
     }
     HArgumentsObject* object = new(zone()) HArgumentsObject;
     AddInstruction(object);
@@ -2335,8 +2314,7 @@
 
 void HGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
   for (int i = 0; i < statements->length(); i++) {
-    Visit(statements->at(i));
-    if (HasStackOverflow() || current_block() == NULL) break;
+    CHECK_ALIVE(Visit(statements->at(i)));
   }
 }
 
@@ -2358,10 +2336,12 @@
 
 
 void HGraphBuilder::VisitBlock(Block* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   BreakAndContinueInfo break_info(stmt);
   { BreakAndContinueScope push(&break_info, this);
-    VisitStatements(stmt->statements());
-    CHECK_BAILOUT;
+    CHECK_BAILOUT(VisitStatements(stmt->statements()));
   }
   HBasicBlock* break_block = break_info.break_block();
   if (break_block != NULL) {
@@ -2373,15 +2353,24 @@
 
 
 void HGraphBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   VisitForEffect(stmt->expression());
 }
 
 
 void HGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
 }
 
 
 void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   if (stmt->condition()->ToBooleanIsTrue()) {
     AddSimulate(stmt->ThenId());
     Visit(stmt->then_statement());
@@ -2391,20 +2380,27 @@
   } else {
     HBasicBlock* cond_true = graph()->CreateBasicBlock();
     HBasicBlock* cond_false = graph()->CreateBasicBlock();
-    VISIT_FOR_CONTROL(stmt->condition(), cond_true, cond_false);
-    cond_true->SetJoinId(stmt->ThenId());
-    cond_false->SetJoinId(stmt->ElseId());
+    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
 
-    set_current_block(cond_true);
-    Visit(stmt->then_statement());
-    CHECK_BAILOUT;
-    HBasicBlock* other = current_block();
+    if (cond_true->HasPredecessor()) {
+      cond_true->SetJoinId(stmt->ThenId());
+      set_current_block(cond_true);
+      CHECK_BAILOUT(Visit(stmt->then_statement()));
+      cond_true = current_block();
+    } else {
+      cond_true = NULL;
+    }
 
-    set_current_block(cond_false);
-    Visit(stmt->else_statement());
-    CHECK_BAILOUT;
+    if (cond_false->HasPredecessor()) {
+      cond_false->SetJoinId(stmt->ElseId());
+      set_current_block(cond_false);
+      CHECK_BAILOUT(Visit(stmt->else_statement()));
+      cond_false = current_block();
+    } else {
+      cond_false = NULL;
+    }
 
-    HBasicBlock* join = CreateJoin(other, current_block(), stmt->id());
+    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->id());
     set_current_block(join);
   }
 }
@@ -2442,6 +2438,9 @@
 
 
 void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   HBasicBlock* continue_block = break_scope()->Get(stmt->target(), CONTINUE);
   current_block()->Goto(continue_block);
   set_current_block(NULL);
@@ -2449,6 +2448,9 @@
 
 
 void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   HBasicBlock* break_block = break_scope()->Get(stmt->target(), BREAK);
   current_block()->Goto(break_block);
   set_current_block(NULL);
@@ -2456,10 +2458,13 @@
 
 
 void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   AstContext* context = call_context();
   if (context == NULL) {
     // Not an inlined return, so an actual one.
-    VISIT_FOR_VALUE(stmt->expression());
+    CHECK_ALIVE(VisitForValue(stmt->expression()));
     HValue* result = environment()->Pop();
     current_block()->FinishExit(new(zone()) HReturn(result));
     set_current_block(NULL);
@@ -2472,11 +2477,11 @@
                       test->if_true(),
                       test->if_false());
     } else if (context->IsEffect()) {
-      VISIT_FOR_EFFECT(stmt->expression());
+      CHECK_ALIVE(VisitForEffect(stmt->expression()));
       current_block()->Goto(function_return(), false);
     } else {
       ASSERT(context->IsValue());
-      VISIT_FOR_VALUE(stmt->expression());
+      CHECK_ALIVE(VisitForValue(stmt->expression()));
       HValue* return_value = environment()->Pop();
       current_block()->AddLeaveInlined(return_value, function_return());
     }
@@ -2486,26 +2491,35 @@
 
 
 void HGraphBuilder::VisitWithEnterStatement(WithEnterStatement* stmt) {
-  BAILOUT("WithEnterStatement");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("WithEnterStatement");
 }
 
 
 void HGraphBuilder::VisitWithExitStatement(WithExitStatement* stmt) {
-  BAILOUT("WithExitStatement");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("WithExitStatement");
 }
 
 
 void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   // We only optimize switch statements with smi-literal smi comparisons,
   // with a bounded number of clauses.
   const int kCaseClauseLimit = 128;
   ZoneList<CaseClause*>* clauses = stmt->cases();
   int clause_count = clauses->length();
   if (clause_count > kCaseClauseLimit) {
-    BAILOUT("SwitchStatement: too many clauses");
+    return Bailout("SwitchStatement: too many clauses");
   }
 
-  VISIT_FOR_VALUE(stmt->tag());
+  CHECK_ALIVE(VisitForValue(stmt->tag()));
   AddSimulate(stmt->EntryId());
   HValue* tag_value = Pop();
   HBasicBlock* first_test_block = current_block();
@@ -2516,7 +2530,7 @@
     CaseClause* clause = clauses->at(i);
     if (clause->is_default()) continue;
     if (!clause->label()->IsSmiLiteral()) {
-      BAILOUT("SwitchStatement: non-literal switch label");
+      return Bailout("SwitchStatement: non-literal switch label");
     }
 
     // Unconditionally deoptimize on the first non-smi compare.
@@ -2528,7 +2542,7 @@
     }
 
     // Otherwise generate a compare and branch.
-    VISIT_FOR_VALUE(clause->label());
+    CHECK_ALIVE(VisitForValue(clause->label()));
     HValue* label_value = Pop();
     HCompare* compare =
         new(zone()) HCompare(tag_value, label_value, Token::EQ_STRICT);
@@ -2590,8 +2604,7 @@
         set_current_block(join);
       }
 
-      VisitStatements(clause->statements());
-      CHECK_BAILOUT;
+      CHECK_BAILOUT(VisitStatements(clause->statements()));
       fall_through_block = current_block();
     }
   }
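
As the bailout messages above spell out, only switches whose case labels are smi literals, with at most kCaseClauseLimit (128) clauses, stay on the optimized path. A small sketch of the distinction (function names illustrative):

function optimizable(x) {
  switch (x) {            // smi-literal labels: still optimizable
    case 1: return 'one';
    case 2: return 'two';
    default: return 'many';
  }
}
function bailsOut(x, y) {
  switch (x) {
    case y: return 'hit';  // non-literal label -> "non-literal switch label" bailout
    default: return 'miss';
  }
}
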
@@ -2651,6 +2664,9 @@
 
 
 void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   ASSERT(current_block() != NULL);
   PreProcessOsrEntry(stmt);
   HBasicBlock* loop_entry = CreateLoopHeaderBlock();
@@ -2659,8 +2675,7 @@
 
   BreakAndContinueInfo break_info(stmt);
   { BreakAndContinueScope push(&break_info, this);
-    Visit(stmt->body());
-    CHECK_BAILOUT;
+    CHECK_BAILOUT(Visit(stmt->body()));
   }
   HBasicBlock* body_exit =
       JoinContinue(stmt, current_block(), break_info.continue_block());
@@ -2671,9 +2686,17 @@
     // back edge.
     body_exit = graph()->CreateBasicBlock();
     loop_successor = graph()->CreateBasicBlock();
-    VISIT_FOR_CONTROL(stmt->cond(), body_exit, loop_successor);
-    body_exit->SetJoinId(stmt->BackEdgeId());
-    loop_successor->SetJoinId(stmt->ExitId());
+    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
+    if (body_exit->HasPredecessor()) {
+      body_exit->SetJoinId(stmt->BackEdgeId());
+    } else {
+      body_exit = NULL;
+    }
+    if (loop_successor->HasPredecessor()) {
+      loop_successor->SetJoinId(stmt->ExitId());
+    } else {
+      loop_successor = NULL;
+    }
   }
   HBasicBlock* loop_exit = CreateLoop(stmt,
                                       loop_entry,
@@ -2685,6 +2708,9 @@
 
 
 void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   ASSERT(current_block() != NULL);
   PreProcessOsrEntry(stmt);
   HBasicBlock* loop_entry = CreateLoopHeaderBlock();
@@ -2696,16 +2722,22 @@
   if (!stmt->cond()->ToBooleanIsTrue()) {
     HBasicBlock* body_entry = graph()->CreateBasicBlock();
     loop_successor = graph()->CreateBasicBlock();
-    VISIT_FOR_CONTROL(stmt->cond(), body_entry, loop_successor);
-    body_entry->SetJoinId(stmt->BodyId());
-    loop_successor->SetJoinId(stmt->ExitId());
-    set_current_block(body_entry);
+    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
+    if (body_entry->HasPredecessor()) {
+      body_entry->SetJoinId(stmt->BodyId());
+      set_current_block(body_entry);
+    }
+    if (loop_successor->HasPredecessor()) {
+      loop_successor->SetJoinId(stmt->ExitId());
+    } else {
+      loop_successor = NULL;
+    }
   }
 
   BreakAndContinueInfo break_info(stmt);
-  { BreakAndContinueScope push(&break_info, this);
-    Visit(stmt->body());
-    CHECK_BAILOUT;
+  if (current_block() != NULL) {
+    BreakAndContinueScope push(&break_info, this);
+    CHECK_BAILOUT(Visit(stmt->body()));
   }
   HBasicBlock* body_exit =
       JoinContinue(stmt, current_block(), break_info.continue_block());
@@ -2719,9 +2751,11 @@
 
 
 void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   if (stmt->init() != NULL) {
-    Visit(stmt->init());
-    CHECK_BAILOUT;
+    CHECK_ALIVE(Visit(stmt->init()));
   }
   ASSERT(current_block() != NULL);
   PreProcessOsrEntry(stmt);
@@ -2733,24 +2767,29 @@
   if (stmt->cond() != NULL) {
     HBasicBlock* body_entry = graph()->CreateBasicBlock();
     loop_successor = graph()->CreateBasicBlock();
-    VISIT_FOR_CONTROL(stmt->cond(), body_entry, loop_successor);
-    body_entry->SetJoinId(stmt->BodyId());
-    loop_successor->SetJoinId(stmt->ExitId());
-    set_current_block(body_entry);
+    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
+    if (body_entry->HasPredecessor()) {
+      body_entry->SetJoinId(stmt->BodyId());
+      set_current_block(body_entry);
+    }
+    if (loop_successor->HasPredecessor()) {
+      loop_successor->SetJoinId(stmt->ExitId());
+    } else {
+      loop_successor = NULL;
+    }
   }
 
   BreakAndContinueInfo break_info(stmt);
-  { BreakAndContinueScope push(&break_info, this);
-    Visit(stmt->body());
-    CHECK_BAILOUT;
+  if (current_block() != NULL) {
+    BreakAndContinueScope push(&break_info, this);
+    CHECK_BAILOUT(Visit(stmt->body()));
   }
   HBasicBlock* body_exit =
       JoinContinue(stmt, current_block(), break_info.continue_block());
 
   if (stmt->next() != NULL && body_exit != NULL) {
     set_current_block(body_exit);
-    Visit(stmt->next());
-    CHECK_BAILOUT;
+    CHECK_BAILOUT(Visit(stmt->next()));
     body_exit = current_block();
   }
 
@@ -2764,22 +2803,34 @@
 
 
 void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
-  BAILOUT("ForInStatement");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("ForInStatement");
 }
 
 
 void HGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
-  BAILOUT("TryCatchStatement");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("TryCatchStatement");
 }
 
 
 void HGraphBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
-  BAILOUT("TryFinallyStatement");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("TryFinallyStatement");
 }
 
 
 void HGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
-  BAILOUT("DebuggerStatement");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("DebuggerStatement");
 }
 
 
@@ -2804,13 +2855,17 @@
 
 
 void HGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   Handle<SharedFunctionInfo> shared_info =
       SearchSharedFunctionInfo(info()->shared_info()->code(),
                                expr);
   if (shared_info.is_null()) {
     shared_info = Compiler::BuildFunctionInfo(expr, info()->script());
   }
-  CHECK_BAILOUT;
+  // We also have a stack overflow if the recursive compilation triggered one.
+  if (HasStackOverflow()) return;
   HFunctionLiteral* instr =
       new(zone()) HFunctionLiteral(shared_info, expr->pretenure());
   ast_context()->ReturnInstruction(instr, expr->id());
@@ -2819,32 +2874,47 @@
 
 void HGraphBuilder::VisitSharedFunctionInfoLiteral(
     SharedFunctionInfoLiteral* expr) {
-  BAILOUT("SharedFunctionInfoLiteral");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("SharedFunctionInfoLiteral");
 }
 
 
 void HGraphBuilder::VisitConditional(Conditional* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   HBasicBlock* cond_true = graph()->CreateBasicBlock();
   HBasicBlock* cond_false = graph()->CreateBasicBlock();
-  VISIT_FOR_CONTROL(expr->condition(), cond_true, cond_false);
-  cond_true->SetJoinId(expr->ThenId());
-  cond_false->SetJoinId(expr->ElseId());
+  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
 
   // Visit the true and false subexpressions in the same AST context as the
   // whole expression.
-  set_current_block(cond_true);
-  Visit(expr->then_expression());
-  CHECK_BAILOUT;
-  HBasicBlock* other = current_block();
+  if (cond_true->HasPredecessor()) {
+    cond_true->SetJoinId(expr->ThenId());
+    set_current_block(cond_true);
+    CHECK_BAILOUT(Visit(expr->then_expression()));
+    cond_true = current_block();
+  } else {
+    cond_true = NULL;
+  }
 
-  set_current_block(cond_false);
-  Visit(expr->else_expression());
-  CHECK_BAILOUT;
+  if (cond_false->HasPredecessor()) {
+    cond_false->SetJoinId(expr->ElseId());
+    set_current_block(cond_false);
+    CHECK_BAILOUT(Visit(expr->else_expression()));
+    cond_false = current_block();
+  } else {
+    cond_false = NULL;
+  }
 
   if (!ast_context()->IsTest()) {
-    HBasicBlock* join = CreateJoin(other, current_block(), expr->id());
+    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
     set_current_block(join);
-    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
+    if (join != NULL && !ast_context()->IsEffect()) {
+      ast_context()->ReturnValue(Pop());
+    }
   }
 }
 
@@ -2881,17 +2951,20 @@
 
 
 void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   Variable* variable = expr->AsVariable();
   if (variable == NULL) {
-    BAILOUT("reference to rewritten variable");
+    return Bailout("reference to rewritten variable");
   } else if (variable->IsStackAllocated()) {
     if (environment()->Lookup(variable)->CheckFlag(HValue::kIsArguments)) {
-      BAILOUT("unsupported context for arguments object");
+      return Bailout("unsupported context for arguments object");
     }
     ast_context()->ReturnValue(environment()->Lookup(variable));
   } else if (variable->IsContextSlot()) {
     if (variable->mode() == Variable::CONST) {
-      BAILOUT("reference to const context slot");
+      return Bailout("reference to const context slot");
     }
     HValue* context = BuildContextChainWalk(variable);
     int index = variable->AsSlot()->index();
@@ -2927,12 +3000,15 @@
       ast_context()->ReturnInstruction(instr, expr->id());
     }
   } else {
-    BAILOUT("reference to a variable which requires dynamic lookup");
+    return Bailout("reference to a variable which requires dynamic lookup");
   }
 }
 
 
 void HGraphBuilder::VisitLiteral(Literal* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   HConstant* instr =
       new(zone()) HConstant(expr->handle(), Representation::Tagged());
   ast_context()->ReturnInstruction(instr, expr->id());
@@ -2940,6 +3016,9 @@
 
 
 void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   HRegExpLiteral* instr = new(zone()) HRegExpLiteral(expr->pattern(),
                                                      expr->flags(),
                                                      expr->literal_index());
@@ -2948,6 +3027,9 @@
 
 
 void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HObjectLiteral* literal =
@@ -2977,7 +3059,7 @@
       case ObjectLiteral::Property::COMPUTED:
         if (key->handle()->IsSymbol()) {
           if (property->emit_store()) {
-            VISIT_FOR_VALUE(value);
+            CHECK_ALIVE(VisitForValue(value));
             HValue* value = Pop();
             Handle<String> name = Handle<String>::cast(key->handle());
             HStoreNamedGeneric* store =
@@ -2990,7 +3072,7 @@
             AddInstruction(store);
             AddSimulate(key->id());
           } else {
-            VISIT_FOR_EFFECT(value);
+            CHECK_ALIVE(VisitForEffect(value));
           }
           break;
         }
@@ -2998,7 +3080,7 @@
       case ObjectLiteral::Property::PROTOTYPE:
       case ObjectLiteral::Property::SETTER:
       case ObjectLiteral::Property::GETTER:
-        BAILOUT("Object literal with complex property");
+        return Bailout("Object literal with complex property");
       default: UNREACHABLE();
     }
   }
@@ -3019,6 +3101,9 @@
 
 
 void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
 
@@ -3038,9 +3123,9 @@
     // is already set in the cloned array.
     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
 
-    VISIT_FOR_VALUE(subexpr);
+    CHECK_ALIVE(VisitForValue(subexpr));
     HValue* value = Pop();
-    if (!Smi::IsValid(i)) BAILOUT("Non-smi key in array literal");
+    if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
 
     // Load the elements array before the first store.
     if (elements == NULL)  {
@@ -3059,7 +3144,10 @@
 
 
 void HGraphBuilder::VisitCatchExtensionObject(CatchExtensionObject* expr) {
-  BAILOUT("CatchExtensionObject");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("CatchExtensionObject");
 }
 
 
@@ -3238,14 +3326,14 @@
   Property* prop = expr->target()->AsProperty();
   ASSERT(prop != NULL);
   expr->RecordTypeFeedback(oracle());
-  VISIT_FOR_VALUE(prop->obj());
+  CHECK_ALIVE(VisitForValue(prop->obj()));
 
   HValue* value = NULL;
   HInstruction* instr = NULL;
 
   if (prop->key()->IsPropertyName()) {
     // Named store.
-    VISIT_FOR_VALUE(expr->value());
+    CHECK_ALIVE(VisitForValue(expr->value()));
     value = Pop();
     HValue* object = Pop();
 
@@ -3269,8 +3357,8 @@
 
   } else {
     // Keyed store.
-    VISIT_FOR_VALUE(prop->key());
-    VISIT_FOR_VALUE(expr->value());
+    CHECK_ALIVE(VisitForValue(prop->key()));
+    CHECK_ALIVE(VisitForValue(expr->value()));
     value = Pop();
     HValue* key = Pop();
     HValue* object = Pop();
@@ -3332,7 +3420,7 @@
   BinaryOperation* operation = expr->binary_operation();
 
   if (var != NULL) {
-    VISIT_FOR_VALUE(operation);
+    CHECK_ALIVE(VisitForValue(operation));
 
     if (var->is_global()) {
       HandleGlobalVariableAssignment(var,
@@ -3349,7 +3437,7 @@
       AddInstruction(instr);
       if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
     } else {
-      BAILOUT("compound assignment to lookup slot");
+      return Bailout("compound assignment to lookup slot");
     }
     ast_context()->ReturnValue(Pop());
 
@@ -3358,7 +3446,7 @@
 
     if (prop->key()->IsPropertyName()) {
       // Named property.
-      VISIT_FOR_VALUE(prop->obj());
+      CHECK_ALIVE(VisitForValue(prop->obj()));
       HValue* obj = Top();
 
       HInstruction* load = NULL;
@@ -3372,7 +3460,7 @@
       PushAndAdd(load);
       if (load->HasSideEffects()) AddSimulate(expr->CompoundLoadId());
 
-      VISIT_FOR_VALUE(expr->value());
+      CHECK_ALIVE(VisitForValue(expr->value()));
       HValue* right = Pop();
       HValue* left = Pop();
 
@@ -3390,8 +3478,8 @@
 
     } else {
       // Keyed property.
-      VISIT_FOR_VALUE(prop->obj());
-      VISIT_FOR_VALUE(prop->key());
+      CHECK_ALIVE(VisitForValue(prop->obj()));
+      CHECK_ALIVE(VisitForValue(prop->key()));
       HValue* obj = environment()->ExpressionStackAt(1);
       HValue* key = environment()->ExpressionStackAt(0);
 
@@ -3399,7 +3487,7 @@
       PushAndAdd(load);
       if (load->HasSideEffects()) AddSimulate(expr->CompoundLoadId());
 
-      VISIT_FOR_VALUE(expr->value());
+      CHECK_ALIVE(VisitForValue(expr->value()));
       HValue* right = Pop();
       HValue* left = Pop();
 
@@ -3418,12 +3506,15 @@
     }
 
   } else {
-    BAILOUT("invalid lhs in compound assignment");
+    return Bailout("invalid lhs in compound assignment");
   }
 }
 
 
 void HGraphBuilder::VisitAssignment(Assignment* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   VariableProxy* proxy = expr->target()->AsVariableProxy();
   Variable* var = proxy->AsVariable();
   Property* prop = expr->target()->AsProperty();
@@ -3435,7 +3526,7 @@
   }
 
   if (var != NULL) {
-    if (proxy->IsArguments()) BAILOUT("assignment to arguments");
+    if (proxy->IsArguments()) return Bailout("assignment to arguments");
 
     // Handle the assignment.
     if (var->IsStackAllocated()) {
@@ -3449,14 +3540,14 @@
       if (rhs_var != NULL && rhs_var->IsStackAllocated()) {
         value = environment()->Lookup(rhs_var);
       } else {
-        VISIT_FOR_VALUE(expr->value());
+        CHECK_ALIVE(VisitForValue(expr->value()));
         value = Pop();
       }
       Bind(var, value);
       ast_context()->ReturnValue(value);
 
     } else if (var->IsContextSlot() && var->mode() != Variable::CONST) {
-      VISIT_FOR_VALUE(expr->value());
+      CHECK_ALIVE(VisitForValue(expr->value()));
       HValue* context = BuildContextChainWalk(var);
       int index = var->AsSlot()->index();
       HStoreContextSlot* instr =
@@ -3466,7 +3557,7 @@
       ast_context()->ReturnValue(Pop());
 
     } else if (var->is_global()) {
-      VISIT_FOR_VALUE(expr->value());
+      CHECK_ALIVE(VisitForValue(expr->value()));
       HandleGlobalVariableAssignment(var,
                                      Top(),
                                      expr->position(),
@@ -3474,23 +3565,26 @@
       ast_context()->ReturnValue(Pop());
 
     } else {
-      BAILOUT("assignment to LOOKUP or const CONTEXT variable");
+      return Bailout("assignment to LOOKUP or const CONTEXT variable");
     }
 
   } else if (prop != NULL) {
     HandlePropertyAssignment(expr);
   } else {
-    BAILOUT("invalid left-hand side in assignment");
+    return Bailout("invalid left-hand side in assignment");
   }
 }
 
 
 void HGraphBuilder::VisitThrow(Throw* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   // We don't optimize functions with invalid left-hand sides in
   // assignments, count operations, or for-in.  Consequently throw can
   // currently only occur in an effect context.
   ASSERT(ast_context()->IsEffect());
-  VISIT_FOR_VALUE(expr->exception());
+  CHECK_ALIVE(VisitForValue(expr->exception()));
 
   HValue* value = environment()->Pop();
   HThrow* instr = new(zone()) HThrow(value);
@@ -3738,7 +3832,7 @@
   } else {
     Push(graph()->GetArgumentsObject());
     VisitForValue(expr->key());
-    if (HasStackOverflow()) return false;
+    if (HasStackOverflow() || current_block() == NULL) return true;
     HValue* key = Pop();
     Drop(1);  // Arguments object.
     HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
@@ -3753,12 +3847,14 @@
 
 
 void HGraphBuilder::VisitProperty(Property* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   expr->RecordTypeFeedback(oracle());
 
   if (TryArgumentsAccess(expr)) return;
-  CHECK_BAILOUT;
 
-  VISIT_FOR_VALUE(expr->obj());
+  CHECK_ALIVE(VisitForValue(expr->obj()));
 
   HInstruction* instr = NULL;
   if (expr->IsArrayLength()) {
@@ -3777,7 +3873,7 @@
                                                   LAST_STRING_TYPE));
     instr = new(zone()) HStringLength(string);
   } else if (expr->IsStringAccess()) {
-    VISIT_FOR_VALUE(expr->key());
+    CHECK_ALIVE(VisitForValue(expr->key()));
     HValue* index = Pop();
     HValue* string = Pop();
     HStringCharCodeAt* char_code = BuildStringCharCodeAt(string, index);
@@ -3804,7 +3900,7 @@
     }
 
   } else {
-    VISIT_FOR_VALUE(expr->key());
+    CHECK_ALIVE(VisitForValue(expr->key()));
 
     HValue* key = Pop();
     HValue* obj = Pop();
@@ -3865,10 +3961,11 @@
         PrintF("Trying to inline the polymorphic call to %s\n",
                *name->ToCString());
       }
-      if (!FLAG_polymorphic_inlining || !TryInline(expr)) {
-        // Check for bailout, as trying to inline might fail due to bailout
-        // during hydrogen processing.
-        CHECK_BAILOUT;
+      if (FLAG_polymorphic_inlining && TryInline(expr)) {
+        // Trying to inline will signal that we should bail out of the
+        // entire compilation by setting a stack overflow on the visitor.
+        if (HasStackOverflow()) return;
+      } else {
         HCallConstantFunction* call =
             new(zone()) HCallConstantFunction(expr->target(), argument_count);
         call->set_position(expr->position());
@@ -3908,10 +4005,12 @@
   // even without predecessors to the join block, we set it as the exit
   // block and continue by adding instructions there.
   ASSERT(join != NULL);
-  set_current_block(join);
   if (join->HasPredecessor()) {
+    set_current_block(join);
     join->SetJoinId(expr->id());
     if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
+  } else {
+    set_current_block(NULL);
   }
 }
 
@@ -4083,7 +4182,7 @@
     // Bail out if the inline function did, as we cannot residualize a call
     // instead.
     TraceInline(target, "inline graph construction failed");
-    return false;
+    return true;
   }
 
   // Update inlined nodes count.
@@ -4139,9 +4238,11 @@
     // flow to handle.
     set_current_block(NULL);
 
-  } else {
+  } else if (function_return()->HasPredecessor()) {
     function_return()->SetJoinId(expr->id());
     set_current_block(function_return());
+  } else {
+    set_current_block(NULL);
   }
 
   return true;
@@ -4267,10 +4368,10 @@
 
   // Found pattern f.apply(receiver, arguments).
   VisitForValue(prop->obj());
-  if (HasStackOverflow()) return false;
+  if (HasStackOverflow() || current_block() == NULL) return true;
   HValue* function = Pop();
   VisitForValue(args->at(0));
-  if (HasStackOverflow()) return false;
+  if (HasStackOverflow() || current_block() == NULL) return true;
   HValue* receiver = Pop();
   HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
   HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
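
For context, the pattern recognized here is the usual arguments-forwarding idiom; when it matches, the builder uses the arguments elements and length directly instead of materializing an arguments object. A sketch of code that matches it (function names illustrative):

function target(x, y) { return x + y; }
function forward() {
  // f.apply(receiver, arguments): the shape TryCallApply looks for.
  return target.apply(this, arguments);
}
forward(1, 2);  // 3
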
@@ -4287,6 +4388,9 @@
 
 
 void HGraphBuilder::VisitCall(Call* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   Expression* callee = expr->expression();
   int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
   HInstruction* call = NULL;
@@ -4295,17 +4399,16 @@
   if (prop != NULL) {
     if (!prop->key()->IsPropertyName()) {
       // Keyed function call.
-      VISIT_FOR_VALUE(prop->obj());
+      CHECK_ALIVE(VisitForValue(prop->obj()));
 
-      VISIT_FOR_VALUE(prop->key());
+      CHECK_ALIVE(VisitForValue(prop->key()));
       // Push receiver and key like the non-optimized code generator expects it.
       HValue* key = Pop();
       HValue* receiver = Pop();
       Push(key);
       Push(receiver);
 
-      VisitExpressions(expr->arguments());
-      CHECK_BAILOUT;
+      CHECK_ALIVE(VisitExpressions(expr->arguments()));
 
       HContext* context = new(zone()) HContext;
       AddInstruction(context);
@@ -4321,11 +4424,9 @@
     expr->RecordTypeFeedback(oracle());
 
     if (TryCallApply(expr)) return;
-    CHECK_BAILOUT;
 
-    VISIT_FOR_VALUE(prop->obj());
-    VisitExpressions(expr->arguments());
-    CHECK_BAILOUT;
+    CHECK_ALIVE(VisitForValue(prop->obj()));
+    CHECK_ALIVE(VisitExpressions(expr->arguments()));
 
     Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
 
@@ -4356,16 +4457,10 @@
       } else {
         AddCheckConstantFunction(expr, receiver, receiver_map, true);
 
-        if (TryInline(expr)) {
-          return;
-        } else {
-          // Check for bailout, as the TryInline call in the if condition above
-          // might return false due to bailout during hydrogen processing.
-          CHECK_BAILOUT;
-          call = PreProcessCall(
-              new(zone()) HCallConstantFunction(expr->target(),
-                                                argument_count));
-        }
+        if (TryInline(expr)) return;
+        call = PreProcessCall(
+            new(zone()) HCallConstantFunction(expr->target(),
+                                              argument_count));
       }
     } else if (types != NULL && types->length() > 1) {
       ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
@@ -4385,7 +4480,7 @@
 
     if (!global_call) {
       ++argument_count;
-      VISIT_FOR_VALUE(expr->expression());
+      CHECK_ALIVE(VisitForValue(expr->expression()));
     }
 
     if (global_call) {
@@ -4407,10 +4502,9 @@
         HGlobalObject* global_object = new(zone()) HGlobalObject(context);
         AddInstruction(context);
         PushAndAdd(global_object);
-        VisitExpressions(expr->arguments());
-        CHECK_BAILOUT;
+        CHECK_ALIVE(VisitExpressions(expr->arguments()));
 
-        VISIT_FOR_VALUE(expr->expression());
+        CHECK_ALIVE(VisitForValue(expr->expression()));
         HValue* function = Pop();
         AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
 
@@ -4424,21 +4518,14 @@
                IsGlobalObject());
         environment()->SetExpressionStackAt(receiver_index, global_receiver);
 
-        if (TryInline(expr)) {
-          return;
-        }
-        // Check for bailout, as trying to inline might fail due to bailout
-        // during hydrogen processing.
-        CHECK_BAILOUT;
-
+        if (TryInline(expr)) return;
         call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
-                                                   argument_count));
+                                                           argument_count));
       } else {
         HContext* context = new(zone()) HContext;
         AddInstruction(context);
         PushAndAdd(new(zone()) HGlobalObject(context));
-        VisitExpressions(expr->arguments());
-        CHECK_BAILOUT;
+        CHECK_ALIVE(VisitExpressions(expr->arguments()));
 
         call = PreProcessCall(new(zone()) HCallGlobal(context,
                                               var->name(),
@@ -4451,8 +4538,7 @@
       AddInstruction(context);
       AddInstruction(global_object);
       PushAndAdd(new(zone()) HGlobalReceiver(global_object));
-      VisitExpressions(expr->arguments());
-      CHECK_BAILOUT;
+      CHECK_ALIVE(VisitExpressions(expr->arguments()));
 
       call = PreProcessCall(new(zone()) HCallFunction(context, argument_count));
     }
@@ -4464,11 +4550,13 @@
 
 
 void HGraphBuilder::VisitCallNew(CallNew* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   // The constructor function is also used as the receiver argument to the
   // JS construct call builtin.
-  VISIT_FOR_VALUE(expr->expression());
-  VisitExpressions(expr->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitForValue(expr->expression()));
+  CHECK_ALIVE(VisitExpressions(expr->arguments()));
 
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
@@ -4500,8 +4588,11 @@
 
 
 void HGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   if (expr->is_jsruntime()) {
-    BAILOUT("call to a JavaScript runtime function");
+    return Bailout("call to a JavaScript runtime function");
   }
 
   const Runtime::Function* function = expr->function();
@@ -4521,8 +4612,7 @@
     (this->*generator)(expr);
   } else {
     ASSERT(function->intrinsic_type == Runtime::RUNTIME);
-    VisitArgumentList(expr->arguments());
-    CHECK_BAILOUT;
+    CHECK_ALIVE(VisitArgumentList(expr->arguments()));
 
     Handle<String> name = expr->name();
     int argument_count = expr->arguments()->length();
@@ -4536,9 +4626,12 @@
 
 
 void HGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   Token::Value op = expr->op();
   if (op == Token::VOID) {
-    VISIT_FOR_EFFECT(expr->expression());
+    CHECK_ALIVE(VisitForEffect(expr->expression()));
     ast_context()->ReturnValue(graph()->GetConstantUndefined());
   } else if (op == Token::DELETE) {
     Property* prop = expr->expression()->AsProperty();
@@ -4546,7 +4639,7 @@
     if (prop == NULL && var == NULL) {
       // Result of deleting non-property, non-variable reference is true.
       // Evaluate the subexpression for side effects.
-      VISIT_FOR_EFFECT(expr->expression());
+      CHECK_ALIVE(VisitForEffect(expr->expression()));
       ast_context()->ReturnValue(graph()->GetConstantTrue());
     } else if (var != NULL &&
                !var->is_global() &&
@@ -4561,17 +4654,17 @@
         // to accesses on the arguments object.
         ast_context()->ReturnValue(graph()->GetConstantFalse());
       } else {
-        VISIT_FOR_VALUE(prop->obj());
-        VISIT_FOR_VALUE(prop->key());
+        CHECK_ALIVE(VisitForValue(prop->obj()));
+        CHECK_ALIVE(VisitForValue(prop->key()));
         HValue* key = Pop();
         HValue* obj = Pop();
         HDeleteProperty* instr = new(zone()) HDeleteProperty(obj, key);
         ast_context()->ReturnInstruction(instr, expr->id());
       }
     } else if (var->is_global()) {
-      BAILOUT("delete with global variable");
+      return Bailout("delete with global variable");
     } else {
-      BAILOUT("delete with non-global variable");
+      return Bailout("delete with non-global variable");
     }
   } else if (op == Token::NOT) {
     if (ast_context()->IsTest()) {
@@ -4582,34 +4675,42 @@
     } else if (ast_context()->IsValue()) {
       HBasicBlock* materialize_false = graph()->CreateBasicBlock();
       HBasicBlock* materialize_true = graph()->CreateBasicBlock();
-      VISIT_FOR_CONTROL(expr->expression(),
-                        materialize_false,
-                        materialize_true);
-      materialize_false->SetJoinId(expr->expression()->id());
-      materialize_true->SetJoinId(expr->expression()->id());
+      CHECK_BAILOUT(VisitForControl(expr->expression(),
+                                    materialize_false,
+                                    materialize_true));
 
-      set_current_block(materialize_false);
-      Push(graph()->GetConstantFalse());
-      set_current_block(materialize_true);
-      Push(graph()->GetConstantTrue());
+      if (materialize_false->HasPredecessor()) {
+        materialize_false->SetJoinId(expr->expression()->id());
+        set_current_block(materialize_false);
+        Push(graph()->GetConstantFalse());
+      } else {
+        materialize_false = NULL;
+      }
+
+      if (materialize_true->HasPredecessor()) {
+        materialize_true->SetJoinId(expr->expression()->id());
+        set_current_block(materialize_true);
+        Push(graph()->GetConstantTrue());
+      } else {
+        materialize_true = NULL;
+      }
 
       HBasicBlock* join =
           CreateJoin(materialize_false, materialize_true, expr->id());
       set_current_block(join);
-      ast_context()->ReturnValue(Pop());
+      if (join != NULL) ast_context()->ReturnValue(Pop());
     } else {
       ASSERT(ast_context()->IsEffect());
       VisitForEffect(expr->expression());
     }
 
   } else if (op == Token::TYPEOF) {
-    VisitForTypeOf(expr->expression());
-    if (HasStackOverflow()) return;
+    CHECK_ALIVE(VisitForTypeOf(expr->expression()));
     HValue* value = Pop();
     ast_context()->ReturnInstruction(new(zone()) HTypeof(value), expr->id());
 
   } else {
-    VISIT_FOR_VALUE(expr->expression());
+    CHECK_ALIVE(VisitForValue(expr->expression()));
     HValue* value = Pop();
     HInstruction* instr = NULL;
     switch (op) {
@@ -4623,7 +4724,7 @@
         instr = new(zone()) HMul(value, graph_->GetConstant1());
         break;
       default:
-        BAILOUT("Value: unsupported unary operation");
+        return Bailout("Value: unsupported unary operation");
         break;
     }
     ast_context()->ReturnInstruction(instr, expr->id());
@@ -4642,6 +4743,9 @@
 
 
 void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   Expression* target = expr->expression();
   VariableProxy* proxy = target->AsVariableProxy();
   Variable* var = proxy->AsVariable();
@@ -4650,7 +4754,7 @@
   bool inc = expr->op() == Token::INC;
 
   if (var != NULL) {
-    VISIT_FOR_VALUE(target);
+    CHECK_ALIVE(VisitForValue(target));
 
     // Match the full code generator stack by simulating an extra stack
     // element for postfix operations in a non-effect context.
@@ -4675,7 +4779,7 @@
       AddInstruction(instr);
       if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
     } else {
-      BAILOUT("lookup variable in count operation");
+      return Bailout("lookup variable in count operation");
     }
     Drop(has_extra ? 2 : 1);
     ast_context()->ReturnValue(expr->is_postfix() ? before : after);
@@ -4691,7 +4795,7 @@
       bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
       if (has_extra) Push(graph_->GetConstantUndefined());
 
-      VISIT_FOR_VALUE(prop->obj());
+      CHECK_ALIVE(VisitForValue(prop->obj()));
       HValue* obj = Top();
 
       HInstruction* load = NULL;
@@ -4732,8 +4836,8 @@
       bool has_extra = expr->is_postfix() && !ast_context()->IsEffect();
       if (has_extra) Push(graph_->GetConstantUndefined());
 
-      VISIT_FOR_VALUE(prop->obj());
-      VISIT_FOR_VALUE(prop->key());
+      CHECK_ALIVE(VisitForValue(prop->obj()));
+      CHECK_ALIVE(VisitForValue(prop->key()));
       HValue* obj = environment()->ExpressionStackAt(1);
       HValue* key = environment()->ExpressionStackAt(0);
 
@@ -4764,7 +4868,7 @@
     }
 
   } else {
-    BAILOUT("invalid lhs in count operation");
+    return Bailout("invalid lhs in count operation");
   }
 }
 
@@ -4784,10 +4888,21 @@
 HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
                                                   HValue* left,
                                                   HValue* right) {
+  TypeInfo info = oracle()->BinaryType(expr);
   HInstruction* instr = NULL;
   switch (expr->op()) {
     case Token::ADD:
-      instr = new(zone()) HAdd(left, right);
+      if (info.IsString()) {
+        AddInstruction(new(zone()) HCheckNonSmi(left));
+        AddInstruction(new(zone()) HCheckInstanceType(
+            left, FIRST_STRING_TYPE, LAST_STRING_TYPE));
+        AddInstruction(new(zone()) HCheckNonSmi(right));
+        AddInstruction(new(zone()) HCheckInstanceType(
+            right, FIRST_STRING_TYPE, LAST_STRING_TYPE));
+        instr = new(zone()) HStringAdd(left, right);
+      } else {
+        instr = new(zone()) HAdd(left, right);
+      }
       break;
     case Token::SUB:
       instr = new(zone()) HSub(left, right);
@@ -4822,7 +4937,6 @@
     default:
       UNREACHABLE();
   }
-  TypeInfo info = oracle()->BinaryType(expr);
   // If we hit an uninitialized binary op stub we will get type info
   // for a smi operation. If one of the operands is a constant string
   // do not generate code assuming it is a smi operation.
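
With string type feedback for '+', BuildBinaryOperation now guards both operands with HCheckNonSmi and HCheckInstanceType and emits HStringAdd instead of the generic HAdd. Since HStringAdd's inferred type is String, the Canonicalize overrides added to those check instructions can drop the guards again when an operand's type is already known to be a string, for instance the result of an inner string add in a chained concatenation. A JavaScript-level sketch (function name illustrative):

function fullName(first, last) {
  // With string operands observed, this compiles to checked HStringAdd
  // instructions rather than generic HAdd; in (first + ' ') + last the inner
  // result is already a string, so the outer guards can canonicalize away.
  return first + ' ' + last;
}
fullName('Ada', 'Lovelace');
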
@@ -4859,8 +4973,11 @@
 
 
 void HGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   if (expr->op() == Token::COMMA) {
-    VISIT_FOR_EFFECT(expr->left());
+    CHECK_ALIVE(VisitForEffect(expr->left()));
     // Visit the right subexpression in the same AST context as the entire
     // expression.
     Visit(expr->right());
@@ -4872,19 +4989,25 @@
       // Translate left subexpression.
       HBasicBlock* eval_right = graph()->CreateBasicBlock();
       if (is_logical_and) {
-        VISIT_FOR_CONTROL(expr->left(), eval_right, context->if_false());
+        CHECK_BAILOUT(VisitForControl(expr->left(),
+                                      eval_right,
+                                      context->if_false()));
       } else {
-        VISIT_FOR_CONTROL(expr->left(), context->if_true(), eval_right);
+        CHECK_BAILOUT(VisitForControl(expr->left(),
+                                      context->if_true(),
+                                      eval_right));
       }
-      eval_right->SetJoinId(expr->RightId());
 
       // Translate right subexpression by visiting it in the same AST
       // context as the entire expression.
-      set_current_block(eval_right);
-      Visit(expr->right());
+      if (eval_right->HasPredecessor()) {
+        eval_right->SetJoinId(expr->RightId());
+        set_current_block(eval_right);
+        Visit(expr->right());
+      }
 
     } else if (ast_context()->IsValue()) {
-      VISIT_FOR_VALUE(expr->left());
+      CHECK_ALIVE(VisitForValue(expr->left()));
       ASSERT(current_block() != NULL);
 
       // We need an extra block to maintain edge-split form.
@@ -4897,7 +5020,7 @@
 
       set_current_block(eval_right);
       Drop(1);  // Value of the left subexpression.
-      VISIT_FOR_VALUE(expr->right());
+      CHECK_BAILOUT(VisitForValue(expr->right()));
 
       HBasicBlock* join_block =
           CreateJoin(empty_block, current_block(), expr->id());
@@ -4911,33 +5034,42 @@
       // extra block to maintain edge-split form.
       HBasicBlock* empty_block = graph()->CreateBasicBlock();
       HBasicBlock* right_block = graph()->CreateBasicBlock();
-      HBasicBlock* join_block = graph()->CreateBasicBlock();
       if (is_logical_and) {
-        VISIT_FOR_CONTROL(expr->left(), right_block, empty_block);
+        CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
       } else {
-        VISIT_FOR_CONTROL(expr->left(), empty_block, right_block);
+        CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
       }
+
       // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
       // actually two empty blocks (one here and one inserted by
+      // TestContext::BuildBranch), and that they both have an HSimulate
       // though the second one is not a merge node, and that we really have
       // no good AST ID to put on that first HSimulate.
-      empty_block->SetJoinId(expr->id());
-      right_block->SetJoinId(expr->RightId());
-      set_current_block(right_block);
-      VISIT_FOR_EFFECT(expr->right());
+      if (empty_block->HasPredecessor()) {
+        empty_block->SetJoinId(expr->id());
+      } else {
+        empty_block = NULL;
+      }
 
-      empty_block->Goto(join_block);
-      current_block()->Goto(join_block);
-      join_block->SetJoinId(expr->id());
+      if (right_block->HasPredecessor()) {
+        right_block->SetJoinId(expr->RightId());
+        set_current_block(right_block);
+        CHECK_BAILOUT(VisitForEffect(expr->right()));
+        right_block = current_block();
+      } else {
+        right_block = NULL;
+      }
+
+      HBasicBlock* join_block =
+          CreateJoin(empty_block, right_block, expr->id());
       set_current_block(join_block);
       // We did not materialize any value in the predecessor environments,
       // so there is no need to handle it here.
     }
 
   } else {
-    VISIT_FOR_VALUE(expr->left());
-    VISIT_FOR_VALUE(expr->right());
+    CHECK_ALIVE(VisitForValue(expr->left()));
+    CHECK_ALIVE(VisitForValue(expr->right()));
 
     HValue* right = Pop();
     HValue* left = Pop();
@@ -4976,9 +5108,12 @@
 
 
 void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
   if (IsClassOfTest(expr)) {
     CallRuntime* call = expr->left()->AsCallRuntime();
-    VISIT_FOR_VALUE(call->arguments()->at(0));
+    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
     HValue* value = Pop();
     Literal* literal = expr->right()->AsLiteral();
     Handle<String> rhs = Handle<String>::cast(literal->handle());
@@ -4994,8 +5129,7 @@
   if ((expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT) &&
       left_unary != NULL && left_unary->op() == Token::TYPEOF &&
       right_literal != NULL && right_literal->handle()->IsString()) {
-    VisitForTypeOf(left_unary->expression());
-    if (HasStackOverflow()) return;
+    CHECK_ALIVE(VisitForTypeOf(left_unary->expression()));
     HValue* left = Pop();
     HInstruction* instr = new(zone()) HTypeofIs(left,
         Handle<String>::cast(right_literal->handle()));
@@ -5004,8 +5138,8 @@
     return;
   }
 
-  VISIT_FOR_VALUE(expr->left());
-  VISIT_FOR_VALUE(expr->right());
+  CHECK_ALIVE(VisitForValue(expr->left()));
+  CHECK_ALIVE(VisitForValue(expr->right()));
 
   HValue* right = Pop();
   HValue* left = Pop();
@@ -5050,7 +5184,7 @@
       instr = new(zone()) HInstanceOfKnownGlobal(left, target);
     }
   } else if (op == Token::IN) {
-    BAILOUT("Unsupported comparison: in");
+    return Bailout("Unsupported comparison: in");
   } else if (type_info.IsNonPrimitive()) {
     switch (op) {
       case Token::EQ:
@@ -5063,7 +5197,7 @@
         break;
       }
       default:
-        BAILOUT("Unsupported non-primitive compare");
+        return Bailout("Unsupported non-primitive compare");
         break;
     }
   } else {
@@ -5078,7 +5212,10 @@
 
 
 void HGraphBuilder::VisitCompareToNull(CompareToNull* expr) {
-  VISIT_FOR_VALUE(expr->expression());
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  CHECK_ALIVE(VisitForValue(expr->expression()));
 
   HValue* value = Pop();
   HIsNull* compare = new(zone()) HIsNull(value, expr->is_strict());
@@ -5087,7 +5224,10 @@
 
 
 void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
-  BAILOUT("ThisFunction");
+  ASSERT(!HasStackOverflow());
+  ASSERT(current_block() != NULL);
+  ASSERT(current_block()->HasPredecessor());
+  return Bailout("ThisFunction");
 }
 
 
@@ -5102,7 +5242,7 @@
       (slot != NULL && slot->type() == Slot::LOOKUP) ||
       decl->mode() == Variable::CONST ||
       decl->fun() != NULL) {
-    BAILOUT("unsupported declaration");
+    return Bailout("unsupported declaration");
   }
 }
 
@@ -5111,7 +5251,7 @@
 // Support for types.
 void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HIsSmi* result = new(zone()) HIsSmi(value);
   ast_context()->ReturnInstruction(result, call->id());
@@ -5120,7 +5260,7 @@
 
 void HGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HHasInstanceType* result =
       new(zone()) HHasInstanceType(value, FIRST_JS_OBJECT_TYPE, LAST_TYPE);
@@ -5130,7 +5270,7 @@
 
 void HGraphBuilder::GenerateIsFunction(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HHasInstanceType* result =
       new(zone()) HHasInstanceType(value, JS_FUNCTION_TYPE);
@@ -5140,7 +5280,7 @@
 
 void HGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HHasCachedArrayIndex* result = new(zone()) HHasCachedArrayIndex(value);
   ast_context()->ReturnInstruction(result, call->id());
@@ -5149,7 +5289,7 @@
 
 void HGraphBuilder::GenerateIsArray(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HHasInstanceType* result = new(zone()) HHasInstanceType(value, JS_ARRAY_TYPE);
   ast_context()->ReturnInstruction(result, call->id());
@@ -5158,7 +5298,7 @@
 
 void HGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HHasInstanceType* result =
       new(zone()) HHasInstanceType(value, JS_REGEXP_TYPE);
@@ -5168,7 +5308,7 @@
 
 void HGraphBuilder::GenerateIsObject(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HIsObject* test = new(zone()) HIsObject(value);
   ast_context()->ReturnInstruction(test, call->id());
@@ -5176,18 +5316,19 @@
 
 
 void HGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
-  BAILOUT("inlined runtime function: IsNonNegativeSmi");
+  return Bailout("inlined runtime function: IsNonNegativeSmi");
 }
 
 
 void HGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
-  BAILOUT("inlined runtime function: IsUndetectableObject");
+  return Bailout("inlined runtime function: IsUndetectableObject");
 }
 
 
 void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
     CallRuntime* call) {
-  BAILOUT("inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
+  return Bailout(
+      "inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
 }
 
 
@@ -5216,7 +5357,7 @@
 
 void HGraphBuilder::GenerateArguments(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* index = Pop();
   HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
   HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
@@ -5230,13 +5371,13 @@
 void HGraphBuilder::GenerateClassOf(CallRuntime* call) {
   // The special form detected by IsClassOfTest is detected before we get here
   // and does not cause a bailout.
-  BAILOUT("inlined runtime function: ClassOf");
+  return Bailout("inlined runtime function: ClassOf");
 }
 
 
 void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HValueOf* result = new(zone()) HValueOf(value);
   ast_context()->ReturnInstruction(result, call->id());
@@ -5244,15 +5385,15 @@
 
 
 void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
-  BAILOUT("inlined runtime function: SetValueOf");
+  return Bailout("inlined runtime function: SetValueOf");
 }
 
 
 // Fast support for charCodeAt(n).
 void HGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 2);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
-  VISIT_FOR_VALUE(call->arguments()->at(1));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
   HValue* index = Pop();
   HValue* string = Pop();
   HStringCharCodeAt* result = BuildStringCharCodeAt(string, index);
@@ -5263,7 +5404,7 @@
 // Fast support for string.charAt(n) and string[n].
 void HGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* char_code = Pop();
   HStringCharFromCode* result = new(zone()) HStringCharFromCode(char_code);
   ast_context()->ReturnInstruction(result, call->id());
@@ -5273,8 +5414,8 @@
 // Fast support for string.charAt(n) and string[n].
 void HGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 2);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
-  VISIT_FOR_VALUE(call->arguments()->at(1));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
   HValue* index = Pop();
   HValue* string = Pop();
   HStringCharCodeAt* char_code = BuildStringCharCodeAt(string, index);
@@ -5287,8 +5428,8 @@
 // Fast support for object equality testing.
 void HGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 2);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
-  VISIT_FOR_VALUE(call->arguments()->at(1));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
   HValue* right = Pop();
   HValue* left = Pop();
   HCompareJSObjectEq* result = new(zone()) HCompareJSObjectEq(left, right);
@@ -5304,15 +5445,14 @@
 
 // Fast support for Math.random().
 void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
-  BAILOUT("inlined runtime function: RandomHeapNumber");
+  return Bailout("inlined runtime function: RandomHeapNumber");
 }
 
 
 // Fast support for StringAdd.
 void HGraphBuilder::GenerateStringAdd(CallRuntime* call) {
   ASSERT_EQ(2, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result = new(zone()) HCallStub(context, CodeStub::StringAdd, 2);
@@ -5324,8 +5464,7 @@
 // Fast support for SubString.
 void HGraphBuilder::GenerateSubString(CallRuntime* call) {
   ASSERT_EQ(3, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result = new(zone()) HCallStub(context, CodeStub::SubString, 3);
@@ -5337,8 +5476,7 @@
 // Fast support for StringCompare.
 void HGraphBuilder::GenerateStringCompare(CallRuntime* call) {
   ASSERT_EQ(2, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result =
@@ -5351,8 +5489,7 @@
 // Support for direct calls from JavaScript to native RegExp code.
 void HGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
   ASSERT_EQ(4, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result = new(zone()) HCallStub(context, CodeStub::RegExpExec, 4);
@@ -5364,8 +5501,7 @@
 // Construct a RegExp exec result with two in-object properties.
 void HGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
   ASSERT_EQ(3, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result =
@@ -5377,15 +5513,14 @@
 
 // Support for fast native caches.
 void HGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
-  BAILOUT("inlined runtime function: GetFromCache");
+  return Bailout("inlined runtime function: GetFromCache");
 }
 
 
 // Fast support for number to string.
 void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
   ASSERT_EQ(1, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result =
@@ -5399,21 +5534,35 @@
 // indices. This should only be used if the indices are known to be
 // non-negative and within bounds of the elements array at the call site.
 void HGraphBuilder::GenerateSwapElements(CallRuntime* call) {
-  BAILOUT("inlined runtime function: SwapElements");
+  return Bailout("inlined runtime function: SwapElements");
 }
 
 
 // Fast call for custom callbacks.
 void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
-  BAILOUT("inlined runtime function: CallFunction");
+  // 1 ~ The function to call is not itself an argument to the call.
+  int arg_count = call->arguments()->length() - 1;
+  ASSERT(arg_count >= 1);  // There's always at least a receiver.
+
+  for (int i = 0; i < arg_count; ++i) {
+    CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
+  }
+  CHECK_ALIVE(VisitForValue(call->arguments()->last()));
+  HValue* function = Pop();
+  HContext* context = new HContext;
+  AddInstruction(context);
+  HInvokeFunction* result =
+      new(zone()) HInvokeFunction(context, function, arg_count);
+  Drop(arg_count);
+  ast_context()->ReturnInstruction(result, call->id());
 }
 
 
 // Fast call to math functions.
 void HGraphBuilder::GenerateMathPow(CallRuntime* call) {
   ASSERT_EQ(2, call->arguments()->length());
-  VISIT_FOR_VALUE(call->arguments()->at(0));
-  VISIT_FOR_VALUE(call->arguments()->at(1));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
   HValue* right = Pop();
   HValue* left = Pop();
   HPower* result = new(zone()) HPower(left, right);
@@ -5423,8 +5572,7 @@
 
 void HGraphBuilder::GenerateMathSin(CallRuntime* call) {
   ASSERT_EQ(1, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result =
@@ -5437,8 +5585,7 @@
 
 void HGraphBuilder::GenerateMathCos(CallRuntime* call) {
   ASSERT_EQ(1, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result =
@@ -5451,8 +5598,7 @@
 
 void HGraphBuilder::GenerateMathLog(CallRuntime* call) {
   ASSERT_EQ(1, call->arguments()->length());
-  VisitArgumentList(call->arguments());
-  CHECK_BAILOUT;
+  CHECK_ALIVE(VisitArgumentList(call->arguments()));
   HContext* context = new(zone()) HContext;
   AddInstruction(context);
   HCallStub* result =
@@ -5464,19 +5610,19 @@
 
 
 void HGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
-  BAILOUT("inlined runtime function: MathSqrt");
+  return Bailout("inlined runtime function: MathSqrt");
 }
 
 
 // Check whether two RegExps are equivalent
 void HGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
-  BAILOUT("inlined runtime function: IsRegExpEquivalent");
+  return Bailout("inlined runtime function: IsRegExpEquivalent");
 }
 
 
 void HGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
   ASSERT(call->arguments()->length() == 1);
-  VISIT_FOR_VALUE(call->arguments()->at(0));
+  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* value = Pop();
   HGetCachedArrayIndex* result = new(zone()) HGetCachedArrayIndex(value);
   ast_context()->ReturnInstruction(result, call->id());
@@ -5484,15 +5630,12 @@
 
 
 void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
-  BAILOUT("inlined runtime function: FastAsciiArrayJoin");
+  return Bailout("inlined runtime function: FastAsciiArrayJoin");
 }
 
 
-#undef BAILOUT
 #undef CHECK_BAILOUT
-#undef VISIT_FOR_EFFECT
-#undef VISIT_FOR_VALUE
-#undef ADD_TO_SUBGRAPH
+#undef CHECK_ALIVE
 
 
 HEnvironment::HEnvironment(HEnvironment* outer,
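
Note on the hydrogen.cc hunks above: the old BAILOUT and VISIT_FOR_* macros give way to an explicit Bailout() call plus CHECK_BAILOUT/CHECK_ALIVE wrappers, so each visitor returns as soon as graph construction has been abandoned or the current block has become unreachable, and an ADD whose type feedback says both operands are strings now emits HStringAdd guarded by non-smi and instance-type checks. The following standalone sketch only illustrates the early-return pattern; the flag and helper names are invented for the example and are not V8's actual definitions.

    #include <cstdio>

    // Stand-in for the graph builder: one flag plays the role of
    // HasStackOverflow(), a bool plays the role of current_block() != NULL.
    struct BuilderSketch {
      bool abandoned = false;
      bool block_alive = true;

      void Bailout(const char* reason) {
        std::printf("bailout: %s\n", reason);
        abandoned = true;  // graph construction stops; callers must return
      }
      void VisitLeft() { /* would build instructions */ }
      void VisitRight() { Bailout("unsupported right operand"); }

    // CHECK_ALIVE(VisitX(...)) expands to roughly this: do the visit, then
    // return from the enclosing visitor if we bailed out or the current
    // block died (e.g. after code that never falls through).
    #define CHECK_ALIVE_SKETCH(call)            \
      do {                                      \
        (call);                                 \
        if (abandoned || !block_alive) return;  \
      } while (false)

      void VisitBinary() {
        CHECK_ALIVE_SKETCH(VisitLeft());
        CHECK_ALIVE_SKETCH(VisitRight());  // returns here after the bailout
        std::printf("emit HAdd\n");        // not reached in this example
      }
    };

    int main() {
      BuilderSketch b;
      b.VisitBinary();
      return 0;
    }
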
diff --git a/src/ia32/assembler-ia32-inl.h b/src/ia32/assembler-ia32-inl.h
index a9247f4..7f7e349 100644
--- a/src/ia32/assembler-ia32-inl.h
+++ b/src/ia32/assembler-ia32-inl.h
@@ -30,7 +30,7 @@
 
 // The original source code covered by the above license above has been
 // modified significantly by Google Inc.
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 
 // A light-weight IA32 Assembler.
 
@@ -311,8 +311,12 @@
 }
 
 
-void Assembler::emit(uint32_t x, RelocInfo::Mode rmode) {
-  if (rmode != RelocInfo::NONE) RecordRelocInfo(rmode);
+void Assembler::emit(uint32_t x, RelocInfo::Mode rmode, unsigned id) {
+  if (rmode == RelocInfo::CODE_TARGET && id != kNoASTId) {
+    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, static_cast<intptr_t>(id));
+  } else if (rmode != RelocInfo::NONE) {
+    RecordRelocInfo(rmode);
+  }
   emit(x);
 }
 
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 9273037..a91b0c4 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -1589,13 +1589,15 @@
 }
 
 
-void Assembler::call(Handle<Code> code, RelocInfo::Mode rmode) {
+void Assembler::call(Handle<Code> code,
+                     RelocInfo::Mode rmode,
+                     unsigned ast_id) {
   positions_recorder()->WriteRecordedPositions();
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   ASSERT(RelocInfo::IsCodeTarget(rmode));
   EMIT(0xE8);
-  emit(reinterpret_cast<intptr_t>(code.location()), rmode);
+  emit(reinterpret_cast<intptr_t>(code.location()), rmode, ast_id);
 }
 
 
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index 079dca7..86ce8a6 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -848,7 +848,9 @@
   void call(Label* L);
   void call(byte* entry, RelocInfo::Mode rmode);
   void call(const Operand& adr);
-  void call(Handle<Code> code, RelocInfo::Mode rmode);
+  void call(Handle<Code> code,
+            RelocInfo::Mode rmode,
+            unsigned ast_id = kNoASTId);
 
   // Jumps
   void jmp(Label* L);  // unconditional jump to L
@@ -1070,7 +1072,9 @@
   void GrowBuffer();
   inline void emit(uint32_t x);
   inline void emit(Handle<Object> handle);
-  inline void emit(uint32_t x, RelocInfo::Mode rmode);
+  inline void emit(uint32_t x,
+                   RelocInfo::Mode rmode,
+                   unsigned ast_id = kNoASTId);
   inline void emit(const Immediate& x);
   inline void emit_w(const Immediate& x);
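
The assembler changes above thread an optional AST id through call() and emit(): a CODE_TARGET call that carries an id other than kNoASTId is recorded as CODE_TARGET_WITH_ID relocation info, and because the new parameter defaults to kNoASTId all existing call sites compile unchanged. A simplified standalone model of that dispatch follows; the enum, the sentinel value, and the function bodies are placeholders, not the real RelocInfo or Assembler declarations.

    #include <cstdint>
    #include <cstdio>

    enum Mode { NONE, CODE_TARGET, CODE_TARGET_WITH_ID };
    static const unsigned kNoASTIdSketch = 0xFFFFFFFFu;  // placeholder value

    static void RecordRelocInfo(Mode mode, intptr_t data = 0) {
      std::printf("reloc: mode=%d data=%ld\n", static_cast<int>(mode),
                  static_cast<long>(data));
    }

    // Mirrors the shape of the new emit(x, rmode, ast_id): only a
    // CODE_TARGET call with a real AST id gets the _WITH_ID mode.
    static void EmitSketch(uint32_t x, Mode rmode,
                           unsigned ast_id = kNoASTIdSketch) {
      if (rmode == CODE_TARGET && ast_id != kNoASTIdSketch) {
        RecordRelocInfo(CODE_TARGET_WITH_ID, static_cast<intptr_t>(ast_id));
      } else if (rmode != NONE) {
        RecordRelocInfo(rmode);
      }
      std::printf("emit 0x%08x\n", x);
    }

    int main() {
      EmitSketch(0xE8u, CODE_TARGET);       // old call sites: plain reloc
      EmitSketch(0xE8u, CODE_TARGET, 42u);  // new sites: the id travels along
      return 0;
    }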
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index cef3fdc..275e8e2 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -446,6 +446,9 @@
     case TRBinaryOpIC::ODDBALL:
       GenerateOddballStub(masm);
       break;
+    case TRBinaryOpIC::BOTH_STRING:
+      GenerateBothStringStub(masm);
+      break;
     case TRBinaryOpIC::STRING:
       GenerateStringStub(masm);
       break;
@@ -909,6 +912,38 @@
 }
 
 
+void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
+  Label call_runtime;
+  ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING);
+  ASSERT(op_ == Token::ADD);
+  // If both arguments are strings, call the string add stub.
+  // Otherwise, do a transition.
+
+  // Registers containing left and right operands respectively.
+  Register left = edx;
+  Register right = eax;
+
+  // Test if left operand is a string.
+  __ test(left, Immediate(kSmiTagMask));
+  __ j(zero, &call_runtime);
+  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
+  __ j(above_equal, &call_runtime);
+
+  // Test if right operand is a string.
+  __ test(right, Immediate(kSmiTagMask));
+  __ j(zero, &call_runtime);
+  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
+  __ j(above_equal, &call_runtime);
+
+  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
+  GenerateRegisterArgsPush(masm);
+  __ TailCallStub(&string_add_stub);
+
+  __ bind(&call_runtime);
+  GenerateTypeTransition(masm);
+}
+
+
 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
   Label call_runtime;
   ASSERT(operands_type_ == TRBinaryOpIC::INT32);
@@ -1120,23 +1155,25 @@
     GenerateAddStrings(masm);
   }
 
+  Factory* factory = masm->isolate()->factory();
+
   // Convert odd ball arguments to numbers.
   NearLabel check, done;
-  __ cmp(edx, FACTORY->undefined_value());
+  __ cmp(edx, factory->undefined_value());
   __ j(not_equal, &check);
   if (Token::IsBitOp(op_)) {
     __ xor_(edx, Operand(edx));
   } else {
-    __ mov(edx, Immediate(FACTORY->nan_value()));
+    __ mov(edx, Immediate(factory->nan_value()));
   }
   __ jmp(&done);
   __ bind(&check);
-  __ cmp(eax, FACTORY->undefined_value());
+  __ cmp(eax, factory->undefined_value());
   __ j(not_equal, &done);
   if (Token::IsBitOp(op_)) {
     __ xor_(eax, Operand(eax));
   } else {
-    __ mov(eax, Immediate(FACTORY->nan_value()));
+    __ mov(eax, Immediate(factory->nan_value()));
   }
   __ bind(&done);
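
The new TRBinaryOpIC::BOTH_STRING case above adds a fast path for Token::ADD when type feedback says both operands are strings: the stub re-checks that each operand is a non-smi with a string instance type and then tail-calls the string-add stub, otherwise it transitions back to a more generic state. The same decision, modelled in plain C++ purely as an illustration of the control flow, not of the emitted ia32 code:

    #include <iostream>
    #include <string>

    // A toy tagged value: either a small integer ("smi") or a string.
    struct Value {
      bool is_string;
      int smi;
      std::string str;
    };
    static Value Smi(int v) { return Value{false, v, ""}; }
    static Value Str(const std::string& s) { return Value{true, 0, s}; }

    // Returns true and writes the concatenation if the fast path applies;
    // returns false when the stub would instead hand off to a more generic
    // handler (the "call_runtime"/type-transition case in the diff).
    static bool BothStringAdd(const Value& left, const Value& right,
                              std::string* out) {
      if (!left.is_string || !right.is_string) return false;
      *out = left.str + right.str;  // the StringAddStub's job
      return true;
    }

    int main() {
      std::string out;
      std::cout << BothStringAdd(Str("foo"), Str("bar"), &out)
                << " " << out << "\n";                          // 1 foobar
      std::cout << BothStringAdd(Smi(1), Str("bar"), &out) << "\n";  // 0
      return 0;
    }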
 
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index 80a75cd..cf73682 100644
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -153,6 +153,7 @@
   void GenerateHeapNumberStub(MacroAssembler* masm);
   void GenerateOddballStub(MacroAssembler* masm);
   void GenerateStringStub(MacroAssembler* masm);
+  void GenerateBothStringStub(MacroAssembler* masm);
   void GenerateGenericStub(MacroAssembler* masm);
   void GenerateAddStrings(MacroAssembler* masm);
 
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index aeee584..12287a5 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -44,6 +44,11 @@
 
 #define __ ACCESS_MASM(masm_)
 
+static unsigned GetPropertyId(Property* property) {
+  if (property->is_synthetic()) return AstNode::kNoNumber;
+  return property->id();
+}
+
 
 class JumpPatchSite BASE_EMBEDDED {
  public:
@@ -735,7 +740,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
     }
   }
 }
@@ -808,7 +813,7 @@
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    EmitCallIC(ic, &patch_site);
+    EmitCallIC(ic, &patch_site, clause->label()->id());
     __ test(eax, Operand(eax));
     __ j(not_equal, &next_test);
     __ Drop(1);  // Switch value is no longer needed.
@@ -1106,7 +1111,7 @@
   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
       ? RelocInfo::CODE_TARGET
       : RelocInfo::CODE_TARGET_CONTEXT;
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, AstNode::kNoNumber);
 }
 
 
@@ -1187,7 +1192,7 @@
           __ mov(eax, Immediate(key_literal->handle()));
           Handle<Code> ic =
               isolate()->builtins()->KeyedLoadIC_Initialize();
-          EmitCallIC(ic, RelocInfo::CODE_TARGET);
+          EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
           __ jmp(done);
         }
       }
@@ -1210,7 +1215,7 @@
     __ mov(eax, GlobalObjectOperand());
     __ mov(ecx, var->name());
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
     context()->Plug(eax);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
@@ -1273,7 +1278,7 @@
 
     // Do a keyed property load.
     Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
 
     // Drop key and object left on the stack by IC.
     context()->Plug(eax);
@@ -1386,7 +1391,7 @@
             Handle<Code> ic = is_strict_mode()
                 ? isolate()->builtins()->StoreIC_Initialize_Strict()
                 : isolate()->builtins()->StoreIC_Initialize();
-            EmitCallIC(ic, RelocInfo::CODE_TARGET);
+            EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           } else {
             VisitForEffect(value);
@@ -1593,13 +1598,13 @@
     SetSourcePosition(expr->position() + 1);
     AccumulatorValueContext context(this);
     if (ShouldInlineSmiCase(op)) {
-      EmitInlineSmiBinaryOp(expr,
+      EmitInlineSmiBinaryOp(expr->binary_operation(),
                             op,
                             mode,
                             expr->target(),
                             expr->value());
     } else {
-      EmitBinaryOp(op, mode);
+      EmitBinaryOp(expr->binary_operation(), op, mode);
     }
 
     // Deoptimization point in case the binary operation may have side effects.
@@ -1634,18 +1639,18 @@
   Literal* key = prop->key()->AsLiteral();
   __ mov(ecx, Immediate(key->handle()));
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
 }
 
 
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
 }
 
 
-void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                               Token::Value op,
                                               OverwriteMode mode,
                                               Expression* left,
@@ -1662,7 +1667,7 @@
   __ bind(&stub_call);
   __ mov(eax, ecx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->id());
   __ jmp(&done);
 
   // Smi case.
@@ -1740,11 +1745,13 @@
 }
 
 
-void FullCodeGenerator::EmitBinaryOp(Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
+                                     Token::Value op,
                                      OverwriteMode mode) {
   __ pop(edx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), NULL);  // NULL signals no inlined smi code.
+  // NULL signals no inlined smi code.
+  EmitCallIC(stub.GetCode(), NULL, expr->id());
   context()->Plug(eax);
 }
 
@@ -1784,7 +1791,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
           : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       break;
     }
     case KEYED_PROPERTY: {
@@ -1807,7 +1814,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       break;
     }
   }
@@ -1833,7 +1840,7 @@
     Handle<Code> ic = is_strict_mode()
         ? isolate()->builtins()->StoreIC_Initialize_Strict()
         : isolate()->builtins()->StoreIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
 
   } else if (op == Token::INIT_CONST) {
     // Like var declarations, const declarations are hoisted to function
@@ -1936,7 +1943,7 @@
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->StoreIC_Initialize_Strict()
       : isolate()->builtins()->StoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -1976,7 +1983,7 @@
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
       : isolate()->builtins()->KeyedStoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2027,7 +2034,7 @@
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
       arg_count, in_loop);
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2061,7 +2068,7 @@
   Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
       arg_count, in_loop);
   __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize));  // Key.
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2252,7 +2259,7 @@
         SetSourcePosition(prop->position());
 
         Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-        EmitCallIC(ic, RelocInfo::CODE_TARGET);
+        EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
         // Push result (function).
         __ push(eax);
         // Push Global receiver.
@@ -3087,15 +3094,14 @@
 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() >= 2);
 
-  int arg_count = args->length() - 2;  // For receiver and function.
-  VisitForStackValue(args->at(0));  // Receiver.
-  for (int i = 0; i < arg_count; i++) {
-    VisitForStackValue(args->at(i + 1));
+  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
+  for (int i = 0; i < arg_count + 1; ++i) {
+    VisitForStackValue(args->at(i));
   }
-  VisitForAccumulatorValue(args->at(arg_count + 1));  // Function.
+  VisitForAccumulatorValue(args->last());  // Function.
 
-  // InvokeFunction requires function in edi. Move it in there.
-  if (!result_register().is(edi)) __ mov(edi, result_register());
+  // InvokeFunction requires the function in edi. Move it in there.
+  __ mov(edi, result_register());
   ParameterCount count(arg_count);
   __ InvokeFunction(edi, count, CALL_FUNCTION);
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -3612,7 +3618,7 @@
     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
     Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
         arg_count, in_loop);
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
     // Restore context register.
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   } else {
@@ -3899,7 +3905,7 @@
   __ mov(edx, eax);
   __ mov(eax, Immediate(Smi::FromInt(1)));
   TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
   __ bind(&done);
 
   // Store the value returned in eax.
@@ -3932,7 +3938,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
           : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3949,7 +3955,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         // Result is on the stack
@@ -3977,7 +3983,7 @@
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
     PrepareForBailout(expr, TOS_REG);
     context()->Plug(eax);
   } else if (proxy != NULL &&
@@ -4176,7 +4182,7 @@
       // Record position and call the compare IC.
       SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(op);
-      EmitCallIC(ic, &patch_site);
+      EmitCallIC(ic, &patch_site, expr->id());
 
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ test(eax, Operand(eax));
@@ -4236,7 +4242,9 @@
 }
 
 
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   RelocInfo::Mode mode,
+                                   unsigned ast_id) {
   ASSERT(mode == RelocInfo::CODE_TARGET ||
          mode == RelocInfo::CODE_TARGET_CONTEXT);
   switch (ic->kind()) {
@@ -4254,34 +4262,13 @@
     default:
       break;
   }
-
-  __ call(ic, mode);
-
-  // Crankshaft doesn't need patching of inlined loads and stores.
-  // When compiling the snapshot we need to produce code that works
-  // with and without Crankshaft.
-  if (V8::UseCrankshaft() && !Serializer::enabled()) {
-    return;
-  }
-
-  // If we're calling a (keyed) load or store stub, we have to mark
-  // the call as containing no inlined code so we will not attempt to
-  // patch it.
-  switch (ic->kind()) {
-    case Code::LOAD_IC:
-    case Code::KEYED_LOAD_IC:
-    case Code::STORE_IC:
-    case Code::KEYED_STORE_IC:
-      __ nop();  // Signals no inlined code.
-      break;
-    default:
-      // Do nothing.
-      break;
-  }
+  __ call(ic, mode, ast_id);
 }
 
 
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   JumpPatchSite* patch_site,
+                                   unsigned ast_id) {
   Counters* counters = isolate()->counters();
   switch (ic->kind()) {
     case Code::LOAD_IC:
@@ -4298,8 +4285,7 @@
     default:
       break;
   }
-
-  __ call(ic, RelocInfo::CODE_TARGET);
+  __ call(ic, RelocInfo::CODE_TARGET, ast_id);
   if (patch_site != NULL && patch_site->is_bound()) {
     patch_site->EmitPatchInfo();
   } else {
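
In full-codegen-ia32.cc every EmitCallIC call site now passes an AST id (or AstNode::kNoNumber when there is none), the new GetPropertyId() helper maps synthetic properties to kNoNumber so they never claim type feedback, and both EmitCallIC overloads simply forward the id to __ call(ic, ..., ast_id). A tiny standalone sketch of that sentinel-and-forward pattern; the types and values are made up for the example.

    #include <cstdio>

    static const unsigned kNoNumberSketch = 0;  // placeholder sentinel

    struct PropertySketch {
      bool is_synthetic;
      unsigned id;
    };

    // Mirrors GetPropertyId(): properties synthesized by the compiler
    // (not present in the source) must not claim an AST id.
    static unsigned GetPropertyIdSketch(const PropertySketch& p) {
      return p.is_synthetic ? kNoNumberSketch : p.id;
    }

    // Mirrors EmitCallIC -> call(ic, mode, ast_id): the id is simply
    // carried along to wherever the call is finally emitted.
    static void EmitCallICSketch(const char* ic, unsigned ast_id) {
      std::printf("call %s with ast_id=%u\n", ic, ast_id);
    }

    int main() {
      EmitCallICSketch("KeyedLoadIC",
                       GetPropertyIdSketch(PropertySketch{false, 17u}));
      EmitCallICSketch("KeyedLoadIC",
                       GetPropertyIdSketch(PropertySketch{true, 17u}));
      return 0;
    }
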
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index b7af03c..4106f01 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -371,12 +371,6 @@
 }
 
 
-// The offset from the inlined patch site to the start of the
-// inlined load instruction.  It is 7 bytes (test eax, imm) plus
-// 6 bytes (jne slow_label).
-const int LoadIC::kOffsetToLoadInstruction = 13;
-
-
 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : receiver
@@ -1273,172 +1267,6 @@
 }
 
 
-bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
-  if (V8::UseCrankshaft()) return false;
-
-  // The address of the instruction following the call.
-  Address test_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-  // If the instruction following the call is not a test eax, nothing
-  // was inlined.
-  if (*test_instruction_address != Assembler::kTestEaxByte) return false;
-
-  Address delta_address = test_instruction_address + 1;
-  // The delta to the start of the map check instruction.
-  int delta = *reinterpret_cast<int*>(delta_address);
-
-  // The map address is the last 4 bytes of the 7-byte
-  // operand-immediate compare instruction, so we add 3 to get the
-  // offset to the last 4 bytes.
-  Address map_address = test_instruction_address + delta + 3;
-  *(reinterpret_cast<Object**>(map_address)) = map;
-
-  // The offset is in the last 4 bytes of a six byte
-  // memory-to-register move instruction, so we add 2 to get the
-  // offset to the last 4 bytes.
-  Address offset_address =
-      test_instruction_address + delta + kOffsetToLoadInstruction + 2;
-  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
-  return true;
-}
-
-
-// One byte opcode for mov ecx,0xXXXXXXXX.
-// Marks inlined contextual loads using all kinds of cells. Generated
-// code has the hole check:
-//   mov reg, <cell>
-//   mov reg, (<cell>, value offset)
-//   cmp reg, <the hole>
-//   je  slow
-//   ;; use reg
-static const byte kMovEcxByte = 0xB9;
-
-// One byte opcode for mov edx,0xXXXXXXXX.
-// Marks inlined contextual loads using only "don't delete"
-// cells. Generated code doesn't have the hole check:
-//   mov reg, <cell>
-//   mov reg, (<cell>, value offset)
-//   ;; use reg
-static const byte kMovEdxByte = 0xBA;
-
-bool LoadIC::PatchInlinedContextualLoad(Address address,
-                                        Object* map,
-                                        Object* cell,
-                                        bool is_dont_delete) {
-  if (V8::UseCrankshaft()) return false;
-
-  // The address of the instruction following the call.
-  Address mov_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-  // If the instruction following the call is not a mov ecx/edx,
-  // nothing was inlined.
-  byte b = *mov_instruction_address;
-  if (b != kMovEcxByte && b != kMovEdxByte) return false;
-  // If we don't have the hole check generated, we can only support
-  // "don't delete" cells.
-  if (b == kMovEdxByte && !is_dont_delete) return false;
-
-  Address delta_address = mov_instruction_address + 1;
-  // The delta to the start of the map check instruction.
-  int delta = *reinterpret_cast<int*>(delta_address);
-
-  // The map address is the last 4 bytes of the 7-byte
-  // operand-immediate compare instruction, so we add 3 to get the
-  // offset to the last 4 bytes.
-  Address map_address = mov_instruction_address + delta + 3;
-  *(reinterpret_cast<Object**>(map_address)) = map;
-
-  // The cell is in the last 4 bytes of a five byte mov reg, imm32
-  // instruction, so we add 1 to get the offset to the last 4 bytes.
-  Address offset_address =
-      mov_instruction_address + delta + kOffsetToLoadInstruction + 1;
-  *reinterpret_cast<Object**>(offset_address) = cell;
-  return true;
-}
-
-
-bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
-  if (V8::UseCrankshaft()) return false;
-
-  // The address of the instruction following the call.
-  Address test_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-
-  // If the instruction following the call is not a test eax, nothing
-  // was inlined.
-  if (*test_instruction_address != Assembler::kTestEaxByte) return false;
-
-  // Extract the encoded deltas from the test eax instruction.
-  Address encoded_offsets_address = test_instruction_address + 1;
-  int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address);
-  int delta_to_map_check = -(encoded_offsets & 0xFFFF);
-  int delta_to_record_write = encoded_offsets >> 16;
-
-  // Patch the map to check. The map address is the last 4 bytes of
-  // the 7-byte operand-immediate compare instruction.
-  Address map_check_address = test_instruction_address + delta_to_map_check;
-  Address map_address = map_check_address + 3;
-  *(reinterpret_cast<Object**>(map_address)) = map;
-
-  // Patch the offset in the store instruction. The offset is in the
-  // last 4 bytes of a six byte register-to-memory move instruction.
-  Address offset_address =
-      map_check_address + StoreIC::kOffsetToStoreInstruction + 2;
-  // The offset should have initial value (kMaxInt - 1), cleared value
-  // (-1) or we should be clearing the inlined version.
-  ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
-         *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == HEAP->null_value()));
-  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
-
-  // Patch the offset in the write-barrier code. The offset is the
-  // last 4 bytes of a six byte lea instruction.
-  offset_address = map_check_address + delta_to_record_write + 2;
-  // The offset should have initial value (kMaxInt), cleared value
-  // (-1) or we should be clearing the inlined version.
-  ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
-         *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == HEAP->null_value()));
-  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
-
-  return true;
-}
-
-
-static bool PatchInlinedMapCheck(Address address, Object* map) {
-  if (V8::UseCrankshaft()) return false;
-
-  Address test_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-  // The keyed load has a fast inlined case if the IC call instruction
-  // is immediately followed by a test instruction.
-  if (*test_instruction_address != Assembler::kTestEaxByte) return false;
-
-  // Fetch the offset from the test instruction to the map cmp
-  // instruction.  This offset is stored in the last 4 bytes of the 5
-  // byte test instruction.
-  Address delta_address = test_instruction_address + 1;
-  int delta = *reinterpret_cast<int*>(delta_address);
-  // Compute the map address.  The map address is in the last 4 bytes
-  // of the 7-byte operand-immediate compare instruction, so we add 3
-  // to the offset to get the map address.
-  Address map_address = test_instruction_address + delta + 3;
-  // Patch the map check.
-  *(reinterpret_cast<Object**>(map_address)) = map;
-  return true;
-}
-
-
-bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
-  return PatchInlinedMapCheck(address, map);
-}
-
-
-bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
-  return PatchInlinedMapCheck(address, map);
-}
-
-
 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : key
@@ -1519,12 +1347,6 @@
 }
 
 
-// The offset from the inlined patch site to the start of the inlined
-// store instruction.  It is 7 bytes (test reg, imm) plus 6 bytes (jne
-// slow_label).
-const int StoreIC::kOffsetToStoreInstruction = 13;
-
-
 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax    : value
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 0f96f78..46c71e8 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -77,7 +77,7 @@
 
 void LCodeGen::FinishCode(Handle<Code> code) {
   ASSERT(is_done());
-  code->set_stack_slots(StackSlotCount());
+  code->set_stack_slots(GetStackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
   PopulateDeoptimizationData(code);
   Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
@@ -132,7 +132,7 @@
   __ push(edi);  // Callee's JS function.
 
   // Reserve space for the stack slots needed by the code.
-  int slots = StackSlotCount();
+  int slots = GetStackSlotCount();
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ mov(Operand(eax), Immediate(slots));
@@ -254,7 +254,7 @@
 
 bool LCodeGen::GenerateSafepointTable() {
   ASSERT(is_done());
-  safepoints_.Emit(masm(), StackSlotCount());
+  safepoints_.Emit(masm(), GetStackSlotCount());
   return !is_aborted();
 }
 
@@ -386,7 +386,7 @@
     translation->StoreDoubleStackSlot(op->index());
   } else if (op->IsArgument()) {
     ASSERT(is_tagged);
-    int src_index = StackSlotCount() + op->index();
+    int src_index = GetStackSlotCount() + op->index();
     translation->StoreStackSlot(src_index);
   } else if (op->IsRegister()) {
     Register reg = ToRegister(op);
@@ -2057,7 +2057,7 @@
   }
   __ mov(esp, ebp);
   __ pop(ebp);
-  __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
+  __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
 }
 
 
@@ -2493,7 +2493,7 @@
   SafepointGenerator safepoint_generator(this,
                                          pointers,
                                          env->deoptimization_index());
-  v8::internal::ParameterCount actual(eax);
+  ParameterCount actual(eax);
   __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
 }
 
@@ -2707,25 +2707,16 @@
   Register output_reg = ToRegister(instr->result());
   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
 
+  Label below_half, done;
   // xmm_scratch = 0.5
   ExternalReference one_half = ExternalReference::address_of_one_half();
   __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
 
+  __ ucomisd(xmm_scratch, input_reg);
+  __ j(above, &below_half);
   // input = input + 0.5
   __ addsd(input_reg, xmm_scratch);
 
-  // We need to return -0 for the input range [-0.5, 0[, otherwise
-  // compute Math.floor(value + 0.5).
-  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
-    __ ucomisd(input_reg, xmm_scratch);
-    DeoptimizeIf(below_equal, instr->environment());
-  } else {
-    // If we don't need to bailout on -0, we check only bailout
-    // on negative inputs.
-    __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
-    __ ucomisd(input_reg, xmm_scratch);
-    DeoptimizeIf(below, instr->environment());
-  }
 
   // Compute Math.floor(value + 0.5).
   // Use truncating instruction (OK because input is positive).
@@ -2734,6 +2725,27 @@
   // Overflow is signalled with minint.
   __ cmp(output_reg, 0x80000000u);
   DeoptimizeIf(equal, instr->environment());
+  __ jmp(&done);
+
+  __ bind(&below_half);
+
+  // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
+  // we can ignore the difference between a result of -0 and +0.
+  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+    // If the sign is positive, we return +0.
+    __ movmskpd(output_reg, input_reg);
+    __ test(output_reg, Immediate(1));
+    DeoptimizeIf(not_zero, instr->environment());
+  } else {
+    // If the input is >= -0.5, we return +0.
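+    // 0xBF000000 is -0.5 as a single-precision float; widened to double below.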
+    __ mov(output_reg, Immediate(0xBF000000));
+    __ movd(xmm_scratch, Operand(output_reg));
+    __ cvtss2sd(xmm_scratch, xmm_scratch);
+    __ ucomisd(input_reg, xmm_scratch);
+    DeoptimizeIf(below, instr->environment());
+  }
+  __ Set(output_reg, Immediate(0));
+  __ bind(&done);
 }
 
 
@@ -2893,6 +2905,21 @@
 }
 
 
+void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
+  ASSERT(ToRegister(instr->context()).is(esi));
+  ASSERT(ToRegister(instr->function()).is(edi));
+  ASSERT(instr->HasPointerMap());
+  ASSERT(instr->HasDeoptimizationEnvironment());
+  LPointerMap* pointers = instr->pointer_map();
+  LEnvironment* env = instr->deoptimization_environment();
+  RecordPosition(pointers->position());
+  RegisterEnvironmentForDeoptimization(env);
+  SafepointGenerator generator(this, pointers, env->deoptimization_index());
+  ParameterCount count(instr->arity());
+  __ InvokeFunction(edi, count, CALL_FUNCTION, &generator);
+}
+
+
 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->context()).is(esi));
   ASSERT(ToRegister(instr->key()).is(ecx));
@@ -3296,6 +3323,22 @@
 }
 
 
+void LCodeGen::DoStringAdd(LStringAdd* instr) {
+  if (instr->left()->IsConstantOperand()) {
+    __ push(ToImmediate(instr->left()));
+  } else {
+    __ push(ToOperand(instr->left()));
+  }
+  if (instr->right()->IsConstantOperand()) {
+    __ push(ToImmediate(instr->right()));
+  } else {
+    __ push(ToOperand(instr->right()));
+  }
+  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
+}
+
+
 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
   LOperand* input = instr->InputAt(0);
   ASSERT(input->IsRegister() || input->IsStackSlot());
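
Note on the DoMathRound rewrite above: inputs of at least 0.5 keep the old add-0.5-and-truncate path, while inputs below 0.5 branch to below_half, where the result is +0 unless the instruction must preserve -0 and the input is negative (deoptimize) or the input is below -0.5 (also deoptimize). DoInvokeFunction and DoStringAdd are the code-generation halves of the HInvokeFunction and HStringAdd instructions introduced in the hydrogen hunks. The JavaScript-visible rounding rule being approximated can be checked with a few lines of ordinary C++ (illustration only):

    #include <cmath>
    #include <cstdio>

    // Math.round(x) in JS is floor(x + 0.5) for finite inputs, so values in
    // [-0.5, 0.5) round to zero; the deopt in the -0-sensitive case exists
    // because floor(-0.3 + 0.5) is +0 while Math.round(-0.3) must be -0.
    static double JsRoundSketch(double x) { return std::floor(x + 0.5); }

    int main() {
      const double inputs[] = {2.5, 0.49, 0.5, -0.49, -0.5, -0.51};
      for (double x : inputs) {
        std::printf("round(%+.2f) = %+.2f\n", x, JsRoundSketch(x));
      }
      return 0;
    }
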
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index 6d42cd7..f8bbea3 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -147,8 +147,8 @@
                        Register temporary,
                        Register temporary2);
 
-  int StackSlotCount() const { return chunk()->spill_slot_count(); }
-  int ParameterCount() const { return scope()->num_parameters(); }
+  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
+  int GetParameterCount() const { return scope()->num_parameters(); }
 
   void Abort(const char* format, ...);
   void Comment(const char* format, ...);
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 9ccd189..aa91a83 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -71,22 +71,21 @@
 
 #ifdef DEBUG
 void LInstruction::VerifyCall() {
-  // Call instructions can use only fixed registers as
-  // temporaries and outputs because all registers
-  // are blocked by the calling convention.
-  // Inputs must use a fixed register.
+  // Call instructions can use only fixed registers as temporaries and
+  // outputs because all registers are blocked by the calling convention.
+  // Input operands must use a fixed register, a use-at-start policy, or
+  // a non-register policy.
   ASSERT(Output() == NULL ||
          LUnallocated::cast(Output())->HasFixedPolicy() ||
          !LUnallocated::cast(Output())->HasRegisterPolicy());
   for (UseIterator it(this); it.HasNext(); it.Advance()) {
-    LOperand* operand = it.Next();
-    ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
-           !LUnallocated::cast(operand)->HasRegisterPolicy());
+    LUnallocated* operand = LUnallocated::cast(it.Next());
+    ASSERT(operand->HasFixedPolicy() ||
+           operand->IsUsedAtStart());
   }
   for (TempIterator it(this); it.HasNext(); it.Advance()) {
-    LOperand* operand = it.Next();
-    ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
-           !LUnallocated::cast(operand)->HasRegisterPolicy());
+    LUnallocated* operand = LUnallocated::cast(it.Next());
+    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
   }
 }
 #endif
@@ -303,6 +302,15 @@
 }
 
 
+void LInvokeFunction::PrintDataTo(StringStream* stream) {
+  stream->Add("= ");
+  InputAt(0)->PrintTo(stream);
+  stream->Add(" ");
+  InputAt(1)->PrintTo(stream);
+  stream->Add(" #%d / ", arity());
+}
+
+
 void LCallKeyed::PrintDataTo(StringStream* stream) {
   stream->Add("[ecx] #%d / ", arity());
 }
@@ -1222,6 +1230,15 @@
 }
 
 
+LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
+  LOperand* context = UseFixed(instr->context(), esi);
+  LOperand* function = UseFixed(instr->function(), edi);
+  argument_count_ -= instr->argument_count();
+  LInvokeFunction* result = new LInvokeFunction(context, function);
+  return MarkAsCall(DefineFixed(result, eax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
+}
+
+
 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
   BuiltinFunctionId op = instr->op();
   if (op == kMathLog) {
@@ -2002,6 +2019,13 @@
 }
 
 
+LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
+  LOperand* left = UseOrConstantAtStart(instr->left());
+  LOperand* right = UseOrConstantAtStart(instr->right());
+  return MarkAsCall(DefineFixed(new LStringAdd(left, right), eax), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
   LOperand* string = UseRegister(instr->string());
   LOperand* index = UseRegisterOrConstant(instr->index());
@@ -2046,7 +2070,8 @@
 
 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
   LDeleteProperty* result =
-      new LDeleteProperty(Use(instr->object()), UseOrConstant(instr->key()));
+      new LDeleteProperty(UseAtStart(instr->object()),
+                          UseOrConstantAtStart(instr->key()));
   return MarkAsCall(DefineFixed(result, eax), instr);
 }
 
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 9ace8f8..76c90be 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -39,6 +39,7 @@
 // Forward declarations.
 class LCodeGen;
 
+
 #define LITHIUM_ALL_INSTRUCTION_LIST(V)         \
   V(ControlInstruction)                         \
   V(Call)                                       \
@@ -106,6 +107,7 @@
   V(InstanceOfAndBranch)                        \
   V(InstanceOfKnownGlobal)                      \
   V(Integer32ToDouble)                          \
+  V(InvokeFunction)                             \
   V(IsNull)                                     \
   V(IsNullAndBranch)                            \
   V(IsObject)                                   \
@@ -154,6 +156,7 @@
   V(StoreKeyedSpecializedArrayElement)          \
   V(StoreNamedField)                            \
   V(StoreNamedGeneric)                          \
+  V(StringAdd)                                  \
   V(StringCharCodeAt)                           \
   V(StringCharFromCode)                         \
   V(StringLength)                               \
@@ -1450,6 +1453,25 @@
 };
 
 
+class LInvokeFunction: public LTemplateInstruction<1, 2, 0> {
+ public:
+  LInvokeFunction(LOperand* context, LOperand* function) {
+    inputs_[0] = context;
+    inputs_[1] = function;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
+  DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
+
+  LOperand* context() { return inputs_[0]; }
+  LOperand* function() { return inputs_[1]; }
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  int arity() const { return hydrogen()->argument_count() - 1; }
+};
+
+
 class LCallKeyed: public LTemplateInstruction<1, 2, 0> {
  public:
   LCallKeyed(LOperand* context, LOperand* key) {
@@ -1769,6 +1791,21 @@
 };
 
 
+class LStringAdd: public LTemplateInstruction<1, 2, 0> {
+ public:
+  LStringAdd(LOperand* left, LOperand* right) {
+    inputs_[0] = left;
+    inputs_[1] = right;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+  DECLARE_HYDROGEN_ACCESSOR(StringAdd)
+
+  LOperand* left() { return inputs_[0]; }
+  LOperand* right() { return inputs_[1]; }
+};
+
+
 class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
  public:
   LStringCharCodeAt(LOperand* string, LOperand* index) {
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index ad567bc..0c24fc4 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -1104,9 +1104,9 @@
 }
 
 
-void MacroAssembler::CallStub(CodeStub* stub) {
+void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
   ASSERT(allow_stub_calls());  // Calls are not allowed in some stubs.
-  call(stub->GetCode(), RelocInfo::CODE_TARGET);
+  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
 }
 
 
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 6909272..f1666ba 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -452,7 +452,7 @@
   // Runtime calls
 
   // Call a code stub.  Generate the code if necessary.
-  void CallStub(CodeStub* stub);
+  void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
 
   // Call a code stub and return the code object called.  Try to generate
   // the code if necessary.  Do not perform a GC but instead return a retry
diff --git a/src/ic.cc b/src/ic.cc
index 99eb21f..2299922 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -304,54 +304,23 @@
 }
 
 
-void KeyedLoadIC::ClearInlinedVersion(Address address) {
-  // Insert null as the map to check for to make sure the map check fails
-  // sending control flow to the IC instead of the inlined version.
-  PatchInlinedLoad(address, HEAP->null_value());
-}
-
-
 void KeyedLoadIC::Clear(Address address, Code* target) {
   if (target->ic_state() == UNINITIALIZED) return;
   // Make sure to also clear the map used in inline fast cases.  If we
   // do not clear these maps, cached code can keep objects alive
   // through the embedded maps.
-  ClearInlinedVersion(address);
   SetTargetAtAddress(address, initialize_stub());
 }
 
 
-void LoadIC::ClearInlinedVersion(Address address) {
-  // Reset the map check of the inlined inobject property load (if
-  // present) to guarantee failure by holding an invalid map (the null
-  // value).  The offset can be patched to anything.
-  Heap* heap = HEAP;
-  PatchInlinedLoad(address, heap->null_value(), 0);
-  PatchInlinedContextualLoad(address,
-                             heap->null_value(),
-                             heap->null_value(),
-                             true);
-}
-
-
 void LoadIC::Clear(Address address, Code* target) {
   if (target->ic_state() == UNINITIALIZED) return;
-  ClearInlinedVersion(address);
   SetTargetAtAddress(address, initialize_stub());
 }
 
 
-void StoreIC::ClearInlinedVersion(Address address) {
-  // Reset the map check of the inlined inobject property store (if
-  // present) to guarantee failure by holding an invalid map (the null
-  // value).  The offset can be patched to anything.
-  PatchInlinedStore(address, HEAP->null_value(), 0);
-}
-
-
 void StoreIC::Clear(Address address, Code* target) {
   if (target->ic_state() == UNINITIALIZED) return;
-  ClearInlinedVersion(address);
   SetTargetAtAddress(address,
       (target->extra_ic_state() == kStrictMode)
         ? initialize_stub_strict()
@@ -359,21 +328,6 @@
 }
 
 
-void KeyedStoreIC::ClearInlinedVersion(Address address) {
-  // Insert null as the elements map to check for.  This will make
-  // sure that the elements fast-case map check fails so that control
-  // flows to the IC instead of the inlined version.
-  PatchInlinedStore(address, HEAP->null_value());
-}
-
-
-void KeyedStoreIC::RestoreInlinedVersion(Address address) {
-  // Restore the fast-case elements map check so that the inlined
-  // version can be used again.
-  PatchInlinedStore(address, HEAP->fixed_array_map());
-}
-
-
 void KeyedStoreIC::Clear(Address address, Code* target) {
   if (target->ic_state() == UNINITIALIZED) return;
   SetTargetAtAddress(address,
@@ -873,9 +827,6 @@
 #endif
       if (state == PREMONOMORPHIC) {
         if (object->IsString()) {
-          Map* map = HeapObject::cast(*object)->map();
-          const int offset = String::kLengthOffset;
-          PatchInlinedLoad(address(), map, offset);
           set_target(isolate()->builtins()->builtin(
               Builtins::kLoadIC_StringLength));
         } else {
@@ -903,9 +854,6 @@
       if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n");
 #endif
       if (state == PREMONOMORPHIC) {
-        Map* map = HeapObject::cast(*object)->map();
-        const int offset = JSArray::kLengthOffset;
-        PatchInlinedLoad(address(), map, offset);
         set_target(isolate()->builtins()->builtin(
             Builtins::kLoadIC_ArrayLength));
       } else {
@@ -948,63 +896,6 @@
     LOG(isolate(), SuspectReadEvent(*name, *object));
   }
 
-  bool can_be_inlined_precheck =
-      FLAG_use_ic &&
-      lookup.IsProperty() &&
-      lookup.IsCacheable() &&
-      lookup.holder() == *object &&
-      !object->IsAccessCheckNeeded();
-
-  bool can_be_inlined =
-      can_be_inlined_precheck &&
-      state == PREMONOMORPHIC &&
-      lookup.type() == FIELD;
-
-  bool can_be_inlined_contextual =
-      can_be_inlined_precheck &&
-      state == UNINITIALIZED &&
-      lookup.holder()->IsGlobalObject() &&
-      lookup.type() == NORMAL;
-
-  if (can_be_inlined) {
-    Map* map = lookup.holder()->map();
-    // Property's index in the properties array.  If negative we have
-    // an inobject property.
-    int index = lookup.GetFieldIndex() - map->inobject_properties();
-    if (index < 0) {
-      // Index is an offset from the end of the object.
-      int offset = map->instance_size() + (index * kPointerSize);
-      if (PatchInlinedLoad(address(), map, offset)) {
-        set_target(megamorphic_stub());
-        TRACE_IC_NAMED("[LoadIC : inline patch %s]\n", name);
-        return lookup.holder()->FastPropertyAt(lookup.GetFieldIndex());
-      } else {
-        TRACE_IC_NAMED("[LoadIC : no inline patch %s (patching failed)]\n",
-                       name);
-      }
-    } else {
-      TRACE_IC_NAMED("[LoadIC : no inline patch %s (not inobject)]\n", name);
-    }
-  } else if (can_be_inlined_contextual) {
-    Map* map = lookup.holder()->map();
-    JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(
-        lookup.holder()->property_dictionary()->ValueAt(
-            lookup.GetDictionaryEntry()));
-    if (PatchInlinedContextualLoad(address(),
-                                   map,
-                                   cell,
-                                   lookup.IsDontDelete())) {
-      set_target(megamorphic_stub());
-      TRACE_IC_NAMED("[LoadIC : inline contextual patch %s]\n", name);
-      ASSERT(cell->value() != isolate()->heap()->the_hole_value());
-      return cell->value();
-    }
-  } else {
-    if (FLAG_use_ic && state == PREMONOMORPHIC) {
-      TRACE_IC_NAMED("[LoadIC : no inline patch %s (not inlinable)]\n", name);
-    }
-  }
-
   // Update inline cache and stub cache.
   if (FLAG_use_ic) {
     UpdateCaches(&lookup, state, object, name);
@@ -1294,18 +1185,6 @@
 #ifdef DEBUG
     TraceIC("KeyedLoadIC", key, state, target());
 #endif  // DEBUG
-
-    // For JSObjects with fast elements that are not value wrappers
-    // and that do not have indexed interceptors, we initialize the
-    // inlined fast case (if present) by patching the inlined map
-    // check.
-    if (object->IsJSObject() &&
-        !object->IsJSValue() &&
-        !JSObject::cast(*object)->HasIndexedInterceptor() &&
-        JSObject::cast(*object)->HasFastElements()) {
-      Map* map = JSObject::cast(*object)->map();
-      PatchInlinedLoad(address(), map);
-    }
   }
 
   // Get the property.
@@ -1471,57 +1350,7 @@
     LookupResult lookup;
 
     if (LookupForWrite(*receiver, *name, &lookup)) {
-      bool can_be_inlined =
-          state == UNINITIALIZED &&
-          lookup.IsProperty() &&
-          lookup.holder() == *receiver &&
-          lookup.type() == FIELD &&
-          !receiver->IsAccessCheckNeeded();
-
-      if (can_be_inlined) {
-        Map* map = lookup.holder()->map();
-        // Property's index in the properties array.  If negative we have
-        // an inobject property.
-        int index = lookup.GetFieldIndex() - map->inobject_properties();
-        if (index < 0) {
-          // Index is an offset from the end of the object.
-          int offset = map->instance_size() + (index * kPointerSize);
-          if (PatchInlinedStore(address(), map, offset)) {
-            set_target((strict_mode == kStrictMode)
-                         ? megamorphic_stub_strict()
-                         : megamorphic_stub());
-#ifdef DEBUG
-            if (FLAG_trace_ic) {
-              PrintF("[StoreIC : inline patch %s]\n", *name->ToCString());
-            }
-#endif
-            return receiver->SetProperty(*name, *value, NONE, strict_mode);
-#ifdef DEBUG
-
-          } else {
-            if (FLAG_trace_ic) {
-              PrintF("[StoreIC : no inline patch %s (patching failed)]\n",
-                     *name->ToCString());
-            }
-          }
-        } else {
-          if (FLAG_trace_ic) {
-            PrintF("[StoreIC : no inline patch %s (not inobject)]\n",
-                   *name->ToCString());
-          }
-        }
-      } else {
-        if (state == PREMONOMORPHIC) {
-          if (FLAG_trace_ic) {
-            PrintF("[StoreIC : no inline patch %s (not inlinable)]\n",
-                   *name->ToCString());
-#endif
-          }
-        }
-      }
-
-      // If no inlined store ic was patched, generate a stub for this
-      // store.
+      // Generate a stub for this store.
       UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
     } else {
       // Strict mode doesn't allow setting non-existent global property
@@ -1990,6 +1819,7 @@
     case INT32: return "Int32s";
     case HEAP_NUMBER: return "HeapNumbers";
     case ODDBALL: return "Oddball";
+    case BOTH_STRING: return "BothStrings";
     case STRING: return "Strings";
     case GENERIC: return "Generic";
     default: return "Invalid";
@@ -2005,6 +1835,7 @@
     case INT32:
     case HEAP_NUMBER:
     case ODDBALL:
+    case BOTH_STRING:
     case STRING:
       return MONOMORPHIC;
     case GENERIC:
@@ -2019,12 +1850,17 @@
                                                TRBinaryOpIC::TypeInfo y) {
   if (x == UNINITIALIZED) return y;
   if (y == UNINITIALIZED) return x;
-  if (x == STRING && y == STRING) return STRING;
-  if (x == STRING || y == STRING) return GENERIC;
-  if (x >= y) return x;
+  if (x == y) return x;
+  if (x == BOTH_STRING && y == STRING) return STRING;
+  if (x == STRING && y == BOTH_STRING) return STRING;
+  if (x == STRING || x == BOTH_STRING || y == STRING || y == BOTH_STRING) {
+    return GENERIC;
+  }
+  if (x > y) return x;
   return y;
 }
 
+
 TRBinaryOpIC::TypeInfo TRBinaryOpIC::GetTypeInfo(Handle<Object> left,
                                                  Handle<Object> right) {
   ::v8::internal::TypeInfo left_type =
@@ -2046,9 +1882,11 @@
     return HEAP_NUMBER;
   }
 
-  if (left_type.IsString() || right_type.IsString()) {
-    // Patching for fast string ADD makes sense even if only one of the
-    // arguments is a string.
+  // Patching for fast string ADD makes sense even if only one of the
+  // arguments is a string.
+  if (left_type.IsString()) {
+    return right_type.IsString() ? BOTH_STRING : STRING;
+  } else if (right_type.IsString()) {
     return STRING;
   }
 
@@ -2081,11 +1919,11 @@
   TRBinaryOpIC::TypeInfo type = TRBinaryOpIC::GetTypeInfo(left, right);
   type = TRBinaryOpIC::JoinTypes(type, previous_type);
   TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED;
-  if (type == TRBinaryOpIC::STRING && op != Token::ADD) {
+  if ((type == TRBinaryOpIC::STRING || type == TRBinaryOpIC::BOTH_STRING) &&
+      op != Token::ADD) {
     type = TRBinaryOpIC::GENERIC;
   }
-  if (type == TRBinaryOpIC::SMI &&
-      previous_type == TRBinaryOpIC::SMI) {
+  if (type == TRBinaryOpIC::SMI && previous_type == TRBinaryOpIC::SMI) {
     if (op == Token::DIV || op == Token::MUL || kSmiValueSize == 32) {
       // Arithmetic on two Smi inputs has yielded a heap number.
       // That is the only way to get here from the Smi stub.
@@ -2097,8 +1935,7 @@
       result_type = TRBinaryOpIC::INT32;
     }
   }
-  if (type == TRBinaryOpIC::INT32 &&
-      previous_type == TRBinaryOpIC::INT32) {
+  if (type == TRBinaryOpIC::INT32 && previous_type == TRBinaryOpIC::INT32) {
     // We must be here because an operation on two INT32 types overflowed.
     result_type = TRBinaryOpIC::HEAP_NUMBER;
   }
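To make the new join behaviour easier to scan, here is a minimal standalone sketch (not V8 code) that mirrors the logic above, assuming the enum order UNINITIALIZED < SMI < INT32 < HEAP_NUMBER < ODDBALL < BOTH_STRING < STRING < GENERIC declared in src/ic.h:

  #include <cassert>

  enum TypeInfo { UNINITIALIZED, SMI, INT32, HEAP_NUMBER, ODDBALL,
                  BOTH_STRING, STRING, GENERIC };

  static TypeInfo Join(TypeInfo x, TypeInfo y) {
    if (x == UNINITIALIZED) return y;
    if (y == UNINITIALIZED) return x;
    if (x == y) return x;
    // BOTH_STRING (both operands strings) widens to STRING (at least one string).
    if ((x == BOTH_STRING && y == STRING) || (x == STRING && y == BOTH_STRING)) {
      return STRING;
    }
    // A string state meeting any non-string state gives up and goes generic.
    if (x == STRING || x == BOTH_STRING || y == STRING || y == BOTH_STRING) {
      return GENERIC;
    }
    return x > y ? x : y;  // Otherwise take the wider numeric state.
  }

  int main() {
    assert(Join(BOTH_STRING, STRING) == STRING);  // string+string, then string+other
    assert(Join(SMI, BOTH_STRING) == GENERIC);    // numbers mixed with strings
    assert(Join(SMI, INT32) == INT32);            // plain numeric widening
    return 0;
  }

In other words, BOTH_STRING only survives while every observed operand has been a string; once strings and non-strings mix, the state collapses to GENERIC rather than widening numerically.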
diff --git a/src/ic.h b/src/ic.h
index 911cbd8..7b7ab43 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -296,14 +296,6 @@
                                    bool support_wrappers);
   static void GenerateFunctionPrototype(MacroAssembler* masm);
 
-  // Clear the use of the inlined version.
-  static void ClearInlinedVersion(Address address);
-
-  // The offset from the inlined patch site to the start of the
-  // inlined load instruction.  It is architecture-dependent, and not
-  // used on ARM.
-  static const int kOffsetToLoadInstruction;
-
  private:
   // Update the inline cache and the global stub cache based on the
   // lookup result.
@@ -328,13 +320,6 @@
 
   static void Clear(Address address, Code* target);
 
-  static bool PatchInlinedLoad(Address address, Object* map, int index);
-
-  static bool PatchInlinedContextualLoad(Address address,
-                                         Object* map,
-                                         Object* cell,
-                                         bool is_dont_delete);
-
   friend class IC;
 };
 
@@ -361,9 +346,6 @@
 
   static void GenerateIndexedInterceptor(MacroAssembler* masm);
 
-  // Clear the use of the inlined version.
-  static void ClearInlinedVersion(Address address);
-
   // Bit mask to be tested against bit field for the cases when
   // generic stub should go into slow case.
   // Access check is necessary explicitly since generic stub does not perform
@@ -407,10 +389,6 @@
 
   static void Clear(Address address, Code* target);
 
-  // Support for patching the map that is checked in an inlined
-  // version of keyed load.
-  static bool PatchInlinedLoad(Address address, Object* map);
-
   friend class IC;
 };
 
@@ -437,13 +415,6 @@
   static void GenerateGlobalProxy(MacroAssembler* masm,
                                   StrictModeFlag strict_mode);
 
-  // Clear the use of an inlined version.
-  static void ClearInlinedVersion(Address address);
-
-  // The offset from the inlined patch site to the start of the
-  // inlined store instruction.
-  static const int kOffsetToStoreInstruction;
-
  private:
   // Update the inline cache and the global stub cache based on the
   // lookup result.
@@ -489,10 +460,6 @@
 
   static void Clear(Address address, Code* target);
 
-  // Support for patching the index and the map that is checked in an
-  // inlined version of the named store.
-  static bool PatchInlinedStore(Address address, Object* map, int index);
-
   friend class IC;
 };
 
@@ -514,12 +481,6 @@
                                          StrictModeFlag strict_mode);
   static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
 
-  // Clear the inlined version so the IC is always hit.
-  static void ClearInlinedVersion(Address address);
-
-  // Restore the inlined version so the fast case can get hit.
-  static void RestoreInlinedVersion(Address address);
-
  private:
   // Update the inline cache.
   void UpdateCaches(LookupResult* lookup,
@@ -564,14 +525,6 @@
 
   static void Clear(Address address, Code* target);
 
-  // Support for patching the map that is checked in an inlined
-  // version of keyed store.
-  // The address is the patch point for the IC call
-  // (Assembler::kCallTargetAddressOffset before the end of
-  // the call/return address).
-  // The map is the new map that the inlined code should check against.
-  static bool PatchInlinedStore(Address address, Object* map);
-
   friend class IC;
 };
 
@@ -586,6 +539,7 @@
     INT32,
     HEAP_NUMBER,
     ODDBALL,
+    BOTH_STRING,  // Only used for addition operation.  Both operands are strings.
     STRING,  // Only used for addition operation.  At least one string operand.
     GENERIC
   };
diff --git a/src/natives.h b/src/natives.h
index 1df94b0..92f0d90 100644
--- a/src/natives.h
+++ b/src/natives.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -36,7 +36,7 @@
                                      int index);
 
 enum NativeType {
-  CORE, D8, I18N
+  CORE, EXPERIMENTAL, D8, I18N
 };
 
 template <NativeType type>
@@ -57,6 +57,7 @@
 };
 
 typedef NativesCollection<CORE> Natives;
+typedef NativesCollection<EXPERIMENTAL> ExperimentalNatives;
 
 } }  // namespace v8::internal
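The natives collections above are distinguished purely by a template tag, so the core and experimental bundles share one interface while the build generates separate backing data (experimental-libraries.cc) for each. A minimal standalone sketch of that tag-parameter pattern (a generic illustration only, not the V8 NativesCollection API):

  #include <cstdio>

  enum NativeType { CORE, EXPERIMENTAL };

  template <NativeType type>
  class Collection {
   public:
    static const char* Name();  // specialized per tag below
  };

  template <> const char* Collection<CORE>::Name() { return "core"; }
  template <> const char* Collection<EXPERIMENTAL>::Name() { return "experimental"; }

  typedef Collection<CORE> Natives;
  typedef Collection<EXPERIMENTAL> ExperimentalNatives;

  int main() {
    std::printf("%s / %s\n", Natives::Name(), ExperimentalNatives::Name());
    return 0;
  }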
 
diff --git a/src/proxy.js b/src/proxy.js
new file mode 100644
index 0000000..2516983
--- /dev/null
+++ b/src/proxy.js
@@ -0,0 +1,28 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+global.Proxy = new $Object();
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 97f0341..8d258ac 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -153,6 +153,7 @@
   if (FLAG_trace_opt) {
     PrintF("[marking (%s) ", eager ? "eagerly" : "lazily");
     function->PrintName();
+    PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
     PrintF(" for recompilation");
     if (delay > 0) {
       PrintF(" (delayed %0.3f ms)", static_cast<double>(delay) / 1000);
diff --git a/src/runtime.cc b/src/runtime.cc
index ceb6c10..53c048e 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -6597,9 +6597,16 @@
   int exponent = number->get_exponent();
   int sign = number->get_sign();
 
-  // We compare with kSmiValueSize - 3 because (2^30 - 0.1) has exponent 29 and
-  // should be rounded to 2^30, which is not smi.
-  if (!sign && exponent <= kSmiValueSize - 3) {
+  if (exponent < -1) {
+    // Number in range ]-0.5..0.5[. These always round to +/-zero.
+    if (sign) return isolate->heap()->minus_zero_value();
+    return Smi::FromInt(0);
+  }
+
+  // We compare with kSmiValueSize - 2 because (2^30 - 0.1) has exponent 29 and
+  // should be rounded to 2^30, which is not a smi (for 31-bit smis; a similar
+  // argument holds for 32-bit smis).
+  if (!sign && exponent < kSmiValueSize - 2) {
     return Smi::FromInt(static_cast<int>(value + 0.5));
   }
 
@@ -10478,7 +10485,7 @@
   // Recursively copy the with contexts.
   Handle<Context> previous(context_chain->previous());
   Handle<JSObject> extension(JSObject::cast(context_chain->extension()));
-  Handle<Context> context = CopyWithContextChain(function_context, previous);
+  Handle<Context> context = CopyWithContextChain(previous, function_context);
   return context->GetIsolate()->factory()->NewWithContext(
       context, extension, context_chain->IsCatchContext());
 }
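A standalone sketch (not V8 code) of the boundary the rounding hunk above describes, assuming 31-bit smis whose largest value is 2^30 - 1:

  #include <cmath>
  #include <cstdio>

  int main() {
    const int kSmiValueSize = 31;             // assumption: 31-bit smis
    const double value = 1073741824.0 - 0.1;  // 2^30 - 0.1
    // The heap number's exponent is the unbiased IEEE exponent, i.e.
    // floor(log2(value)) for positive finite values; frexp is off by one.
    int frexp_exponent;
    std::frexp(value, &frexp_exponent);
    const int exponent = frexp_exponent - 1;
    std::printf("exponent = %d\n", exponent);  // prints 29
    // The fast path requires exponent < kSmiValueSize - 2, i.e. 29 < 29, which
    // fails: value + 0.5 rounds to 2^30, one past the largest 31-bit smi.
    if (!(exponent < kSmiValueSize - 2)) {
      std::printf("%.1f is rejected by the smi fast path\n", value);
    }
    return 0;
  }

The new early return for exponents below -1 handles the other end of the range: values strictly between -0.5 and 0.5 always round to +0 or -0 and never need the general path.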
diff --git a/src/type-info.cc b/src/type-info.cc
index 4069c83..02f69d0 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -69,8 +69,8 @@
 }
 
 
-Handle<Object> TypeFeedbackOracle::GetInfo(int pos) {
-  int entry = dictionary_->FindEntry(pos);
+Handle<Object> TypeFeedbackOracle::GetInfo(unsigned ast_id) {
+  int entry = dictionary_->FindEntry(ast_id);
   return entry != NumberDictionary::kNotFound
       ? Handle<Object>(dictionary_->ValueAt(entry))
       : Isolate::Current()->factory()->undefined_value();
@@ -78,7 +78,7 @@
 
 
 bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) {
-  Handle<Object> map_or_code(GetInfo(expr->position()));
+  Handle<Object> map_or_code(GetInfo(expr->id()));
   if (map_or_code->IsMap()) return true;
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
@@ -90,7 +90,7 @@
 
 
 bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) {
-  Handle<Object> map_or_code(GetInfo(expr->position()));
+  Handle<Object> map_or_code(GetInfo(expr->id()));
   if (map_or_code->IsMap()) return true;
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
@@ -102,7 +102,7 @@
 
 
 bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
-  Handle<Object> value = GetInfo(expr->position());
+  Handle<Object> value = GetInfo(expr->id());
   return value->IsMap() || value->IsSmi();
 }
 
@@ -110,7 +110,7 @@
 Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
   ASSERT(LoadIsMonomorphic(expr));
   Handle<Object> map_or_code(
-      Handle<HeapObject>::cast(GetInfo(expr->position())));
+      Handle<HeapObject>::cast(GetInfo(expr->id())));
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
     return Handle<Map>(code->FindFirstMap());
@@ -122,7 +122,7 @@
 Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) {
   ASSERT(StoreIsMonomorphic(expr));
   Handle<HeapObject> map_or_code(
-      Handle<HeapObject>::cast(GetInfo(expr->position())));
+      Handle<HeapObject>::cast(GetInfo(expr->id())));
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
     return Handle<Map>(code->FindFirstMap());
@@ -134,14 +134,14 @@
 ZoneMapList* TypeFeedbackOracle::LoadReceiverTypes(Property* expr,
                                                    Handle<String> name) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
-  return CollectReceiverTypes(expr->position(), name, flags);
+  return CollectReceiverTypes(expr->id(), name, flags);
 }
 
 
 ZoneMapList* TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr,
                                                     Handle<String> name) {
   Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL);
-  return CollectReceiverTypes(expr->position(), name, flags);
+  return CollectReceiverTypes(expr->id(), name, flags);
 }
 
 
@@ -158,12 +158,12 @@
                                                     OWN_MAP,
                                                     NOT_IN_LOOP,
                                                     arity);
-  return CollectReceiverTypes(expr->position(), name, flags);
+  return CollectReceiverTypes(expr->id(), name, flags);
 }
 
 
 CheckType TypeFeedbackOracle::GetCallCheckType(Call* expr) {
-  Handle<Object> value = GetInfo(expr->position());
+  Handle<Object> value = GetInfo(expr->id());
   if (!value->IsSmi()) return RECEIVER_MAP_CHECK;
   CheckType check = static_cast<CheckType>(Smi::cast(*value)->value());
   ASSERT(check != RECEIVER_MAP_CHECK);
@@ -172,14 +172,14 @@
 
 ExternalArrayType TypeFeedbackOracle::GetKeyedLoadExternalArrayType(
     Property* expr) {
-  Handle<Object> stub = GetInfo(expr->position());
+  Handle<Object> stub = GetInfo(expr->id());
   ASSERT(stub->IsCode());
   return Code::cast(*stub)->external_array_type();
 }
 
 ExternalArrayType TypeFeedbackOracle::GetKeyedStoreExternalArrayType(
     Expression* expr) {
-  Handle<Object> stub = GetInfo(expr->position());
+  Handle<Object> stub = GetInfo(expr->id());
   ASSERT(stub->IsCode());
   return Code::cast(*stub)->external_array_type();
 }
@@ -207,13 +207,13 @@
 
 
 bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
-  return *GetInfo(expr->position()) ==
+  return *GetInfo(expr->id()) ==
       Isolate::Current()->builtins()->builtin(id);
 }
 
 
 TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
-  Handle<Object> object = GetInfo(expr->position());
+  Handle<Object> object = GetInfo(expr->id());
   TypeInfo unknown = TypeInfo::Unknown();
   if (!object->IsCode()) return unknown;
   Handle<Code> code = Handle<Code>::cast(object);
@@ -240,7 +240,7 @@
 
 
 TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr) {
-  Handle<Object> object = GetInfo(expr->position());
+  Handle<Object> object = GetInfo(expr->id());
   TypeInfo unknown = TypeInfo::Unknown();
   if (!object->IsCode()) return unknown;
   Handle<Code> code = Handle<Code>::cast(object);
@@ -275,6 +275,8 @@
         return TypeInfo::Integer32();
       case TRBinaryOpIC::HEAP_NUMBER:
         return TypeInfo::Double();
+      case TRBinaryOpIC::BOTH_STRING:
+        return TypeInfo::String();
       case TRBinaryOpIC::STRING:
       case TRBinaryOpIC::GENERIC:
         return unknown;
@@ -287,7 +289,7 @@
 
 
 TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
-  Handle<Object> object = GetInfo(clause->position());
+  Handle<Object> object = GetInfo(clause->label()->id());
   TypeInfo unknown = TypeInfo::Unknown();
   if (!object->IsCode()) return unknown;
   Handle<Code> code = Handle<Code>::cast(object);
@@ -313,11 +315,11 @@
 }
 
 
-ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(int position,
+ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
                                                       Handle<String> name,
                                                       Code::Flags flags) {
   Isolate* isolate = Isolate::Current();
-  Handle<Object> object = GetInfo(position);
+  Handle<Object> object = GetInfo(ast_id);
   if (object->IsUndefined() || object->IsSmi()) return NULL;
 
   if (*object == isolate->builtins()->builtin(Builtins::kStoreIC_GlobalProxy)) {
@@ -340,8 +342,9 @@
 }
 
 
-void TypeFeedbackOracle::SetInfo(int position, Object* target) {
-  MaybeObject* maybe_result = dictionary_->AtNumberPut(position, target);
+void TypeFeedbackOracle::SetInfo(unsigned ast_id, Object* target) {
+  ASSERT(dictionary_->FindEntry(ast_id) == NumberDictionary::kNotFound);
+  MaybeObject* maybe_result = dictionary_->AtNumberPut(ast_id, target);
   USE(maybe_result);
 #ifdef DEBUG
   Object* result;
@@ -358,53 +361,47 @@
 
   const int kInitialCapacity = 16;
   List<int> code_positions(kInitialCapacity);
-  List<int> source_positions(kInitialCapacity);
-  CollectPositions(*code, &code_positions, &source_positions);
+  List<unsigned> ast_ids(kInitialCapacity);
+  CollectIds(*code, &code_positions, &ast_ids);
 
   ASSERT(dictionary_.is_null());  // Only initialize once.
   dictionary_ = isolate->factory()->NewNumberDictionary(
       code_positions.length());
 
-  int length = code_positions.length();
-  ASSERT(source_positions.length() == length);
+  const int length = code_positions.length();
+  ASSERT(ast_ids.length() == length);
   for (int i = 0; i < length; i++) {
     AssertNoAllocation no_allocation;
     RelocInfo info(code->instruction_start() + code_positions[i],
                    RelocInfo::CODE_TARGET, 0);
     Code* target = Code::GetCodeFromTargetAddress(info.target_address());
-    int position = source_positions[i];
+    unsigned id = ast_ids[i];
     InlineCacheState state = target->ic_state();
     Code::Kind kind = target->kind();
 
     if (kind == Code::TYPE_RECORDING_BINARY_OP_IC ||
         kind == Code::COMPARE_IC) {
-      // TODO(kasperl): Avoid having multiple ICs with the same
-      // position by making sure that we have position information
-      // recorded for all binary ICs.
-      int entry = dictionary_->FindEntry(position);
-      if (entry == NumberDictionary::kNotFound) {
-        SetInfo(position, target);
-      }
+      SetInfo(id, target);
     } else if (state == MONOMORPHIC) {
       if (kind == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC ||
           kind == Code::KEYED_EXTERNAL_ARRAY_STORE_IC) {
-        SetInfo(position, target);
-      } else if (target->kind() != Code::CALL_IC ||
-          target->check_type() == RECEIVER_MAP_CHECK) {
+        SetInfo(id, target);
+      } else if (kind != Code::CALL_IC ||
+                 target->check_type() == RECEIVER_MAP_CHECK) {
         Map* map = target->FindFirstMap();
         if (map == NULL) {
-          SetInfo(position, target);
+          SetInfo(id, target);
         } else {
-          SetInfo(position, map);
+          SetInfo(id, map);
         }
       } else {
         ASSERT(target->kind() == Code::CALL_IC);
         CheckType check = target->check_type();
         ASSERT(check != RECEIVER_MAP_CHECK);
-        SetInfo(position, Smi::FromInt(check));
+        SetInfo(id, Smi::FromInt(check));
       }
     } else if (state == MEGAMORPHIC) {
-      SetInfo(position, target);
+      SetInfo(id, target);
     }
   }
   // Allocate handle in the parent scope.
@@ -412,41 +409,31 @@
 }
 
 
-void TypeFeedbackOracle::CollectPositions(Code* code,
-                                          List<int>* code_positions,
-                                          List<int>* source_positions) {
+void TypeFeedbackOracle::CollectIds(Code* code,
+                                    List<int>* code_positions,
+                                    List<unsigned>* ast_ids) {
   AssertNoAllocation no_allocation;
-  int position = 0;
-  // Because the ICs we use for global variables access in the full
-  // code generator do not have any meaningful positions, we avoid
-  // collecting those by filtering out contextual code targets.
-  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
-      RelocInfo::kPositionMask;
+  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
   for (RelocIterator it(code, mask); !it.done(); it.next()) {
     RelocInfo* info = it.rinfo();
-    RelocInfo::Mode mode = info->rmode();
-    if (RelocInfo::IsCodeTarget(mode)) {
-      Code* target = Code::GetCodeFromTargetAddress(info->target_address());
-      if (target->is_inline_cache_stub()) {
-        InlineCacheState state = target->ic_state();
-        Code::Kind kind = target->kind();
-        if (kind == Code::TYPE_RECORDING_BINARY_OP_IC) {
-          if (target->type_recording_binary_op_type() ==
-              TRBinaryOpIC::GENERIC) {
-            continue;
-          }
-        } else if (kind == Code::COMPARE_IC) {
-          if (target->compare_state() == CompareIC::GENERIC) continue;
-        } else {
-          if (state != MONOMORPHIC && state != MEGAMORPHIC) continue;
+    ASSERT(RelocInfo::IsCodeTarget(info->rmode()));
+    Code* target = Code::GetCodeFromTargetAddress(info->target_address());
+    if (target->is_inline_cache_stub()) {
+      InlineCacheState state = target->ic_state();
+      Code::Kind kind = target->kind();
+      if (kind == Code::TYPE_RECORDING_BINARY_OP_IC) {
+        if (target->type_recording_binary_op_type() ==
+            TRBinaryOpIC::GENERIC) {
+          continue;
         }
-        code_positions->Add(
-            static_cast<int>(info->pc() - code->instruction_start()));
-        source_positions->Add(position);
+      } else if (kind == Code::COMPARE_IC) {
+        if (target->compare_state() == CompareIC::GENERIC) continue;
+      } else {
+        if (state != MONOMORPHIC && state != MEGAMORPHIC) continue;
       }
-    } else {
-      ASSERT(RelocInfo::IsPosition(mode));
-      position = static_cast<int>(info->data());
+      code_positions->Add(
+          static_cast<int>(info->pc() - code->instruction_start()));
+      ast_ids->Add(static_cast<unsigned>(info->data()));
     }
   }
 }
diff --git a/src/type-info.h b/src/type-info.h
index f6e6729..905625a 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -36,18 +36,18 @@
 namespace internal {
 
 //         Unknown
-//           |   |
-//           |   \--------------|
-//      Primitive             Non-primitive
-//           |   \--------|     |
-//         Number      String   |
-//         /    |         |     |
-//    Double  Integer32   |    /
-//        |      |       /    /
-//        |     Smi     /    /
-//        |      |     /    /
-//        |      |    /    /
-//        Uninitialized.--/
+//           |   \____________
+//           |                |
+//      Primitive       Non-primitive
+//           |   \_______     |
+//           |           |    |
+//        Number       String |
+//         /   \         |    |
+//    Double  Integer32  |   /
+//        |      |      /   /
+//        |     Smi    /   /
+//        |      |    / __/
+//        Uninitialized.
 
 class TypeInfo {
  public:
@@ -263,21 +263,21 @@
   TypeInfo SwitchType(CaseClause* clause);
 
  private:
-  ZoneMapList* CollectReceiverTypes(int position,
+  ZoneMapList* CollectReceiverTypes(unsigned ast_id,
                                     Handle<String> name,
                                     Code::Flags flags);
 
-  void SetInfo(int position, Object* target);
+  void SetInfo(unsigned ast_id, Object* target);
 
   void PopulateMap(Handle<Code> code);
 
-  void CollectPositions(Code* code,
-                        List<int>* code_positions,
-                        List<int>* source_positions);
+  void CollectIds(Code* code,
+                  List<int>* code_positions,
+                  List<unsigned>* ast_ids);
 
   // Returns an element from the backing store. Returns undefined if
   // there is no information.
-  Handle<Object> GetInfo(int pos);
+  Handle<Object> GetInfo(unsigned ast_id);
 
   Handle<Context> global_context_;
   Handle<NumberDictionary> dictionary_;
diff --git a/src/version.cc b/src/version.cc
index 0fe0d0b..bf93be1 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -33,9 +33,9 @@
 // NOTE these macros are used by the SCons build script so their names
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
-#define MINOR_VERSION     2
-#define BUILD_NUMBER      10
-#define PATCH_LEVEL       2
+#define MINOR_VERSION     3
+#define BUILD_NUMBER      0
+#define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index 9541a58..8db54f0 100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -61,9 +61,15 @@
 }
 
 
-void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
+void Assembler::emit_code_target(Handle<Code> target,
+                                 RelocInfo::Mode rmode,
+                                 unsigned ast_id) {
   ASSERT(RelocInfo::IsCodeTarget(rmode));
-  RecordRelocInfo(rmode);
+  if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
+    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id);
+  } else {
+    RecordRelocInfo(rmode);
+  }
   int current = code_targets_.length();
   if (current > 0 && code_targets_.last().is_identical_to(target)) {
     // Optimization if we keep jumping to the same code target.
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 9fe212e..c5e35af 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -869,12 +869,14 @@
 }
 
 
-void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
+void Assembler::call(Handle<Code> target,
+                     RelocInfo::Mode rmode,
+                     unsigned ast_id) {
   positions_recorder()->WriteRecordedPositions();
   EnsureSpace ensure_space(this);
   // 1110 1000 #32-bit disp.
   emit(0xE8);
-  emit_code_target(target, rmode);
+  emit_code_target(target, rmode, ast_id);
 }
 
 
@@ -2540,6 +2542,24 @@
 }
 
 
+void Assembler::movq(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  if (dst.low_bits() == 4) {
+    // Avoid unnecessary SIB byte.
+    emit(0xf3);
+    emit_optional_rex_32(dst, src);
+    emit(0x0F);
+    emit(0x7e);
+    emit_sse_operand(dst, src);
+  } else {
+    emit(0x66);
+    emit_optional_rex_32(src, dst);
+    emit(0x0F);
+    emit(0xD6);
+    emit_sse_operand(src, dst);
+  }
+}
+
 void Assembler::movdqa(const Operand& dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   emit(0x66);
@@ -2603,6 +2623,42 @@
 }
 
 
+void Assembler::movaps(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  if (src.low_bits() == 4) {
+    // Try to avoid an unnecessary SIB byte.
+    emit_optional_rex_32(src, dst);
+    emit(0x0F);
+    emit(0x29);
+    emit_sse_operand(src, dst);
+  } else {
+    emit_optional_rex_32(dst, src);
+    emit(0x0F);
+    emit(0x28);
+    emit_sse_operand(dst, src);
+  }
+}
+
+
+void Assembler::movapd(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  if (src.low_bits() == 4) {
+    // Try to avoid an unnecessary SIB byte.
+    emit(0x66);
+    emit_optional_rex_32(src, dst);
+    emit(0x0F);
+    emit(0x29);
+    emit_sse_operand(src, dst);
+  } else {
+    emit(0x66);
+    emit_optional_rex_32(dst, src);
+    emit(0x0F);
+    emit(0x28);
+    emit_sse_operand(dst, src);
+  }
+}
+
+
 void Assembler::movss(XMMRegister dst, const Operand& src) {
   EnsureSpace ensure_space(this);
   emit(0xF3);  // single
@@ -2833,6 +2889,15 @@
 }
 
 
+void Assembler::xorps(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0x57);
+  emit_sse_operand(dst, src);
+}
+
+
 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   emit(0xF2);
@@ -2863,6 +2928,21 @@
 }
 
 
+void Assembler::roundsd(XMMRegister dst, XMMRegister src,
+                        Assembler::RoundingMode mode) {
+  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(dst, src);
+  emit(0x0f);
+  emit(0x3a);
+  emit(0x0b);
+  emit_sse_operand(dst, src);
+  // Mask precision exception.
+  emit(static_cast<byte>(mode) | 0x8);
+}
+
+
 void Assembler::movmskpd(Register dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   emit(0x66);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 9453277..32db4b8 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -1183,7 +1183,9 @@
   // Calls
   // Call near relative 32-bit displacement, relative to next instruction.
   void call(Label* L);
-  void call(Handle<Code> target, RelocInfo::Mode rmode);
+  void call(Handle<Code> target,
+            RelocInfo::Mode rmode,
+            unsigned ast_id = kNoASTId);
 
   // Calls directly to the given address using a relative offset.
   // Should only ever be used in Code objects for calls within the
@@ -1291,15 +1293,24 @@
   void movd(Register dst, XMMRegister src);
   void movq(XMMRegister dst, Register src);
   void movq(Register dst, XMMRegister src);
+  void movq(XMMRegister dst, XMMRegister src);
   void extractps(Register dst, XMMRegister src, byte imm8);
 
-  void movsd(const Operand& dst, XMMRegister src);
+  // Don't use this unless it's important to keep the
+  // top half of the destination register unchanged.
+  // Use movaps when moving double values and movq for integer
+  // values in xmm registers.
   void movsd(XMMRegister dst, XMMRegister src);
+
+  void movsd(const Operand& dst, XMMRegister src);
   void movsd(XMMRegister dst, const Operand& src);
 
   void movdqa(const Operand& dst, XMMRegister src);
   void movdqa(XMMRegister dst, const Operand& src);
 
+  void movapd(XMMRegister dst, XMMRegister src);
+  void movaps(XMMRegister dst, XMMRegister src);
+
   void movss(XMMRegister dst, const Operand& src);
   void movss(const Operand& dst, XMMRegister src);
 
@@ -1331,11 +1342,21 @@
   void andpd(XMMRegister dst, XMMRegister src);
   void orpd(XMMRegister dst, XMMRegister src);
   void xorpd(XMMRegister dst, XMMRegister src);
+  void xorps(XMMRegister dst, XMMRegister src);
   void sqrtsd(XMMRegister dst, XMMRegister src);
 
   void ucomisd(XMMRegister dst, XMMRegister src);
   void ucomisd(XMMRegister dst, const Operand& src);
 
+  enum RoundingMode {
+    kRoundToNearest = 0x0,
+    kRoundDown      = 0x1,
+    kRoundUp        = 0x2,
+    kRoundToZero    = 0x3
+  };
+
+  void roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode);
+
   void movmskpd(Register dst, XMMRegister src);
 
   // The first argument is the reg field, the second argument is the r/m field.
@@ -1408,7 +1429,9 @@
   inline void emitl(uint32_t x);
   inline void emitq(uint64_t x, RelocInfo::Mode rmode);
   inline void emitw(uint16_t x);
-  inline void emit_code_target(Handle<Code> target, RelocInfo::Mode rmode);
+  inline void emit_code_target(Handle<Code> target,
+                               RelocInfo::Mode rmode,
+                               unsigned ast_id = kNoASTId);
   void emit(Immediate x) { emitl(x.value_); }
 
   // Emits a REX prefix that encodes a 64-bit operand size and
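An illustrative fragment of how the new XMM helpers declared above might be used from a code generator; this is not code from the patch, and it assumes the usual __/ACCESS_MASM macro and the CpuFeatures::Scope guard used elsewhere in this codebase:

  __ xorps(xmm0, xmm0);    // zero xmm0; xorps is one byte shorter than xorpd
  __ movaps(xmm1, xmm2);   // copy the whole register instead of movsd's low-half merge
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatures::Scope use_sse4_1(SSE4_1);
    __ roundsd(xmm1, xmm1, Assembler::kRoundDown);  // round toward -infinity (SSE4.1)
  }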
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 11727a0..76fcc88 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -266,7 +266,7 @@
   __ j(not_equal, &true_result);
   // HeapNumber => false iff +0, -0, or NaN.
   // These three cases set the zero flag when compared to zero using ucomisd.
-  __ xorpd(xmm0, xmm0);
+  __ xorps(xmm0, xmm0);
   __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
   __ j(zero, &false_result);
   // Fall through to |true_result|.
@@ -372,6 +372,9 @@
     case TRBinaryOpIC::ODDBALL:
       GenerateOddballStub(masm);
       break;
+    case TRBinaryOpIC::BOTH_STRING:
+      GenerateBothStringStub(masm);
+      break;
     case TRBinaryOpIC::STRING:
       GenerateStringStub(masm);
       break;
@@ -771,6 +774,36 @@
 }
 
 
+void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
+  Label call_runtime;
+  ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING);
+  ASSERT(op_ == Token::ADD);
+  // If both arguments are strings, call the string add stub.
+  // Otherwise, do a transition.
+
+  // Registers containing left and right operands respectively.
+  Register left = rdx;
+  Register right = rax;
+
+  // Test if left operand is a string.
+  __ JumpIfSmi(left, &call_runtime);
+  __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
+  __ j(above_equal, &call_runtime);
+
+  // Test if right operand is a string.
+  __ JumpIfSmi(right, &call_runtime);
+  __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
+  __ j(above_equal, &call_runtime);
+
+  StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
+  GenerateRegisterArgsPush(masm);
+  __ TailCallStub(&string_add_stub);
+
+  __ bind(&call_runtime);
+  GenerateTypeTransition(masm);
+}
+
+
 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
   Label call_runtime;
 
@@ -1569,7 +1602,7 @@
   __ bind(&no_neg);
 
   // Load xmm1 with 1.
-  __ movsd(xmm1, xmm3);
+  __ movaps(xmm1, xmm3);
   NearLabel while_true;
   NearLabel no_multiply;
 
@@ -1587,8 +1620,8 @@
   __ j(positive, &allocate_return);
   // Special case if xmm1 has reached infinity.
   __ divsd(xmm3, xmm1);
-  __ movsd(xmm1, xmm3);
-  __ xorpd(xmm0, xmm0);
+  __ movaps(xmm1, xmm3);
+  __ xorps(xmm0, xmm0);
   __ ucomisd(xmm0, xmm1);
   __ j(equal, &call_runtime);
 
@@ -1636,11 +1669,11 @@
 
   // Calculates reciprocal of square root.
   // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
-  __ xorpd(xmm1, xmm1);
+  __ xorps(xmm1, xmm1);
   __ addsd(xmm1, xmm0);
   __ sqrtsd(xmm1, xmm1);
   __ divsd(xmm3, xmm1);
-  __ movsd(xmm1, xmm3);
+  __ movaps(xmm1, xmm3);
   __ jmp(&allocate_return);
 
   // Test for 0.5.
@@ -1653,8 +1686,8 @@
   __ j(not_equal, &call_runtime);
   // Calculates square root.
   // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
-  __ xorpd(xmm1, xmm1);
-  __ addsd(xmm1, xmm0);
+  __ xorps(xmm1, xmm1);
+  __ addsd(xmm1, xmm0);  // Convert -0 to 0.
   __ sqrtsd(xmm1, xmm1);
 
   __ bind(&allocate_return);
@@ -2330,9 +2363,10 @@
   // Heap::GetNumberStringCache.
   Label is_smi;
   Label load_result_from_cache;
+  Factory* factory = masm->isolate()->factory();
   if (!object_is_smi) {
     __ JumpIfSmi(object, &is_smi);
-    __ CheckMap(object, FACTORY->heap_number_map(), not_found, true);
+    __ CheckMap(object, factory->heap_number_map(), not_found, true);
 
     STATIC_ASSERT(8 == kDoubleSize);
     __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
@@ -2419,6 +2453,7 @@
   ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
 
   Label check_unequal_objects, done;
+  Factory* factory = masm->isolate()->factory();
 
   // Compare two smis if required.
   if (include_smi_compare_) {
@@ -2466,7 +2501,6 @@
     // Note: if cc_ != equal, never_nan_nan_ is not used.
     // We cannot set rax to EQUAL until just before return because
     // rax must be unchanged on jump to not_identical.
-
     if (never_nan_nan_ && (cc_ == equal)) {
       __ Set(rax, EQUAL);
       __ ret(0);
@@ -2474,7 +2508,7 @@
       NearLabel heap_number;
       // If it's not a heap number, then return equal for (in)equality operator.
       __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
-             FACTORY->heap_number_map());
+             factory->heap_number_map());
       __ j(equal, &heap_number);
       if (cc_ != equal) {
         // Call runtime on identical JSObjects.  Otherwise return equal.
@@ -2519,7 +2553,7 @@
 
         // Check if the non-smi operand is a heap number.
         __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-               FACTORY->heap_number_map());
+               factory->heap_number_map());
         // If heap number, handle it in the slow case.
         __ j(equal, &slow);
         // Return non-equal.  ebx (the lower half of rbx) is not zero.
@@ -3450,10 +3484,11 @@
     MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
   __ Abort("Unexpected fallthrough to CharCodeAt slow case");
 
+  Factory* factory = masm->isolate()->factory();
   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
-  __ CheckMap(index_, FACTORY->heap_number_map(), index_not_number_, true);
+  __ CheckMap(index_, factory->heap_number_map(), index_not_number_, true);
   call_helper.BeforeCall(masm);
   __ push(object_);
   __ push(index_);
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index f97d099..3b40280 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -152,6 +152,7 @@
   void GenerateHeapNumberStub(MacroAssembler* masm);
   void GenerateOddballStub(MacroAssembler* masm);
   void GenerateStringStub(MacroAssembler* masm);
+  void GenerateBothStringStub(MacroAssembler* masm);
   void GenerateGenericStub(MacroAssembler* masm);
 
   void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 2b7b7b7..82bc6ef 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -1021,12 +1021,26 @@
         current += PrintRightOperand(current);
         AppendToBuffer(", %s, %d", NameOfCPURegister(regop), (*current) & 3);
         current += 1;
+      } else if (third_byte == 0x0b) {
+        get_modrm(*current, &mod, &regop, &rm);
+        // roundsd xmm, xmm/m64, imm8
+        AppendToBuffer("roundsd %s, ", NameOfCPURegister(regop));
+        current += PrintRightOperand(current);
+        AppendToBuffer(", %d", (*current) & 3);
+        current += 1;
       } else {
         UnimplementedInstruction();
       }
     } else {
       get_modrm(*current, &mod, &regop, &rm);
-      if (opcode == 0x6E) {
+      if (opcode == 0x28) {
+        AppendToBuffer("movapd %s, ", NameOfXMMRegister(regop));
+        current += PrintRightXMMOperand(current);
+      } else if (opcode == 0x29) {
+        AppendToBuffer("movapd ");
+        current += PrintRightXMMOperand(current);
+        AppendToBuffer(", %s", NameOfXMMRegister(regop));
+      } else if (opcode == 0x6E) {
         AppendToBuffer("mov%c %s,",
                        rex_w() ? 'q' : 'd',
                        NameOfXMMRegister(regop));
@@ -1044,6 +1058,10 @@
         AppendToBuffer("movdqa ");
         current += PrintRightXMMOperand(current);
         AppendToBuffer(", %s", NameOfXMMRegister(regop));
+      } else if (opcode == 0xD6) {
+        AppendToBuffer("movq ");
+        current += PrintRightXMMOperand(current);
+        AppendToBuffer(", %s", NameOfXMMRegister(regop));
       } else {
         const char* mnemonic = "?";
         if (opcode == 0x50) {
@@ -1145,6 +1163,11 @@
       get_modrm(*current, &mod, &regop, &rm);
       AppendToBuffer("cvtss2sd %s,", NameOfXMMRegister(regop));
       current += PrintRightXMMOperand(current);
+    } else if (opcode == 0x7E) {
+      int mod, regop, rm;
+      get_modrm(*current, &mod, &regop, &rm);
+      AppendToBuffer("movq %s, ", NameOfXMMRegister(regop));
+      current += PrintRightXMMOperand(current);
     } else {
       UnimplementedInstruction();
     }
@@ -1162,6 +1185,22 @@
       current += 4;
     }  // else no immediate displacement.
     AppendToBuffer("nop");
+
+  } else if (opcode == 0x28) {
+    // movaps xmm, xmm/m128
+    int mod, regop, rm;
+    get_modrm(*current, &mod, &regop, &rm);
+    AppendToBuffer("movaps %s, ", NameOfXMMRegister(regop));
+    current += PrintRightXMMOperand(current);
+
+  } else if (opcode == 0x29) {
+    // movaps xmm/m128, xmm
+    int mod, regop, rm;
+    get_modrm(*current, &mod, &regop, &rm);
+    AppendToBuffer("movaps ");
+    current += PrintRightXMMOperand(current);
+    AppendToBuffer(", %s", NameOfXMMRegister(regop));
+
   } else if (opcode == 0xA2 || opcode == 0x31) {
     // RDTSC or CPUID
     AppendToBuffer("%s", mnemonic);
@@ -1173,6 +1212,13 @@
     byte_size_operand_ = idesc.byte_size_operation;
     current += PrintOperands(idesc.mnem, idesc.op_order_, current);
 
+  } else if (opcode == 0x57) {
+    // xorps xmm, xmm/m128
+    int mod, regop, rm;
+    get_modrm(*current, &mod, &regop, &rm);
+    AppendToBuffer("xorps %s, ", NameOfXMMRegister(regop));
+    current += PrintRightXMMOperand(current);
+
   } else if ((opcode & 0xF0) == 0x80) {
     // Jcc: Conditional jump (branch).
     current = data + JumpConditional(data);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 97168cd..8d02f88 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -44,6 +44,12 @@
 #define __ ACCESS_MASM(masm_)
 
 
+static unsigned GetPropertyId(Property* property) {
+  if (property->is_synthetic()) return AstNode::kNoNumber;
+  return property->id();
+}
+
+
 class JumpPatchSite BASE_EMBEDDED {
  public:
   explicit JumpPatchSite(MacroAssembler* masm)
@@ -743,7 +749,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
     }
   }
 }
@@ -816,7 +822,7 @@
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    EmitCallIC(ic, &patch_site);
+    EmitCallIC(ic, &patch_site, clause->label()->id());
 
     __ testq(rax, rax);
     __ j(not_equal, &next_test);
@@ -1125,7 +1131,7 @@
   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
       ? RelocInfo::CODE_TARGET
       : RelocInfo::CODE_TARGET_CONTEXT;
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, AstNode::kNoNumber);
 }
 
 
@@ -1206,7 +1212,7 @@
           __ Move(rax, key_literal->handle());
           Handle<Code> ic =
               isolate()->builtins()->KeyedLoadIC_Initialize();
-          EmitCallIC(ic, RelocInfo::CODE_TARGET);
+          EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
           __ jmp(done);
         }
       }
@@ -1229,7 +1235,7 @@
     __ Move(rcx, var->name());
     __ movq(rax, GlobalObjectOperand());
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
     context()->Plug(rax);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
@@ -1292,7 +1298,7 @@
 
     // Do a keyed property load.
     Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
     context()->Plug(rax);
   }
 }
@@ -1403,7 +1409,7 @@
           __ movq(rdx, Operand(rsp, 0));
           if (property->emit_store()) {
             Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
-            EmitCallIC(ic, RelocInfo::CODE_TARGET);
+            EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           }
           break;
@@ -1606,13 +1612,13 @@
     SetSourcePosition(expr->position() + 1);
     AccumulatorValueContext context(this);
     if (ShouldInlineSmiCase(op)) {
-      EmitInlineSmiBinaryOp(expr,
+      EmitInlineSmiBinaryOp(expr->binary_operation(),
                             op,
                             mode,
                             expr->target(),
                             expr->value());
     } else {
-      EmitBinaryOp(op, mode);
+      EmitBinaryOp(expr->binary_operation(), op, mode);
     }
     // Deoptimization point in case the binary operation may have side effects.
     PrepareForBailout(expr->binary_operation(), TOS_REG);
@@ -1646,18 +1652,18 @@
   Literal* key = prop->key()->AsLiteral();
   __ Move(rcx, key->handle());
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
 }
 
 
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetSourcePosition(prop->position());
   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
 }
 
 
-void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                               Token::Value op,
                                               OverwriteMode mode,
                                               Expression* left,
@@ -1675,7 +1681,7 @@
   __ bind(&stub_call);
   __ movq(rax, rcx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->id());
   __ jmp(&done);
 
   __ bind(&smi_case);
@@ -1717,11 +1723,13 @@
 }
 
 
-void FullCodeGenerator::EmitBinaryOp(Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
+                                     Token::Value op,
                                      OverwriteMode mode) {
   __ pop(rdx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  EmitCallIC(stub.GetCode(), NULL);  // NULL signals no inlined smi code.
+  // NULL signals no inlined smi code.
+  EmitCallIC(stub.GetCode(), NULL, expr->id());
   context()->Plug(rax);
 }
 
@@ -1761,7 +1769,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
           : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       break;
     }
     case KEYED_PROPERTY: {
@@ -1784,7 +1792,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
       break;
     }
   }
@@ -1810,7 +1818,7 @@
     Handle<Code> ic = is_strict_mode()
         ? isolate()->builtins()->StoreIC_Initialize_Strict()
         : isolate()->builtins()->StoreIC_Initialize();
-    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
 
   } else if (op == Token::INIT_CONST) {
     // Like var declarations, const declarations are hoisted to function
@@ -1913,7 +1921,7 @@
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->StoreIC_Initialize_Strict()
       : isolate()->builtins()->StoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -1953,7 +1961,7 @@
   Handle<Code> ic = is_strict_mode()
       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
       : isolate()->builtins()->KeyedStoreIC_Initialize();
-  EmitCallIC(ic, RelocInfo::CODE_TARGET);
+  EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
 
   // If the assignment ends an initialization block, revert to fast case.
   if (expr->ends_initialization_block()) {
@@ -2005,7 +2013,7 @@
   InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
   Handle<Code> ic =
       ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2040,7 +2048,7 @@
   Handle<Code> ic =
       ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
   __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
-  EmitCallIC(ic, mode);
+  EmitCallIC(ic, mode, expr->id());
   RecordJSReturnSite(expr);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2232,7 +2240,7 @@
         SetSourcePosition(prop->position());
 
         Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-        EmitCallIC(ic, RelocInfo::CODE_TARGET);
+        EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
         // Push result (function).
         __ push(rax);
         // Push Global receiver.
@@ -2758,7 +2766,7 @@
   __ movd(xmm1, rcx);
   __ movd(xmm0, rax);
   __ cvtss2sd(xmm1, xmm1);
-  __ xorpd(xmm0, xmm1);
+  __ xorps(xmm0, xmm1);
   __ subsd(xmm0, xmm1);
   __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
 
@@ -3043,15 +3051,14 @@
 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() >= 2);
 
-  int arg_count = args->length() - 2;  // For receiver and function.
-  VisitForStackValue(args->at(0));  // Receiver.
-  for (int i = 0; i < arg_count; i++) {
-    VisitForStackValue(args->at(i + 1));
+  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
+  for (int i = 0; i < arg_count + 1; i++) {
+    VisitForStackValue(args->at(i));
   }
-  VisitForAccumulatorValue(args->at(arg_count + 1));  // Function.
+  VisitForAccumulatorValue(args->last());  // Function.
 
-  // InvokeFunction requires function in rdi. Move it in there.
-  if (!result_register().is(rdi)) __ movq(rdi, result_register());
+  // InvokeFunction requires the function in rdi. Move it there.
+  __ movq(rdi, result_register());
   ParameterCount count(arg_count);
   __ InvokeFunction(rdi, count, CALL_FUNCTION);
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -3593,7 +3600,7 @@
     InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
     Handle<Code> ic =
         ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
     // Restore context register.
     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   } else {
@@ -3878,7 +3885,7 @@
     __ movq(rdx, rax);
     __ Move(rax, Smi::FromInt(1));
   }
-  EmitCallIC(stub.GetCode(), &patch_site);
+  EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
   __ bind(&done);
 
   // Store the value returned in rax.
@@ -3911,7 +3918,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->StoreIC_Initialize_Strict()
           : isolate()->builtins()->StoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3928,7 +3935,7 @@
       Handle<Code> ic = is_strict_mode()
           ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
           : isolate()->builtins()->KeyedStoreIC_Initialize();
-      EmitCallIC(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3955,7 +3962,7 @@
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
     // Use a regular load, not a contextual load, to avoid a reference
     // error.
-    EmitCallIC(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
     PrepareForBailout(expr, TOS_REG);
     context()->Plug(rax);
   } else if (proxy != NULL &&
@@ -4153,7 +4160,7 @@
       // Record position and call the compare IC.
       SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(op);
-      EmitCallIC(ic, &patch_site);
+      EmitCallIC(ic, &patch_site, expr->id());
 
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ testq(rax, rax);
@@ -4213,7 +4220,9 @@
 }
 
 
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   RelocInfo::Mode mode,
+                                   unsigned ast_id) {
   ASSERT(mode == RelocInfo::CODE_TARGET ||
          mode == RelocInfo::CODE_TARGET_CONTEXT);
   Counters* counters = isolate()->counters();
@@ -4232,34 +4241,13 @@
     default:
       break;
   }
-
-  __ call(ic, mode);
-
-  // Crankshaft doesn't need patching of inlined loads and stores.
-  // When compiling the snapshot we need to produce code that works
-  // with and without Crankshaft.
-  if (V8::UseCrankshaft() && !Serializer::enabled()) {
-    return;
-  }
-
-  // If we're calling a (keyed) load or store stub, we have to mark
-  // the call as containing no inlined code so we will not attempt to
-  // patch it.
-  switch (ic->kind()) {
-    case Code::LOAD_IC:
-    case Code::KEYED_LOAD_IC:
-    case Code::STORE_IC:
-    case Code::KEYED_STORE_IC:
-      __ nop();  // Signals no inlined code.
-      break;
-    default:
-      // Do nothing.
-      break;
-  }
+  __ call(ic, mode, ast_id);
 }
 
 
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+                                   JumpPatchSite* patch_site,
+                                   unsigned ast_id) {
   Counters* counters = isolate()->counters();
   switch (ic->kind()) {
     case Code::LOAD_IC:
@@ -4276,8 +4264,7 @@
     default:
       break;
   }
-
-  __ call(ic, RelocInfo::CODE_TARGET);
+  __ call(ic, RelocInfo::CODE_TARGET, ast_id);
   if (patch_site != NULL && patch_site->is_bound()) {
     patch_site->EmitPatchInfo();
   } else {
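
The net effect of the EmitCallIC changes above is that every IC call site now carries the id of the AST node it was emitted for (or AstNode::kNoNumber when there is nothing useful to record), and the MacroAssembler::Call/CallStub overloads further down thread that id through to the underlying call emission. A stand-alone sketch of that bookkeeping, in which every type is a hypothetical stand-in rather than a real V8 class:

    #include <cstdio>
    #include <vector>

    static const unsigned kNoASTId = 0xFFFFFFFFu;  // stand-in sentinel

    struct RelocEntry {
      int pc_offset;
      unsigned ast_id;  // kNoASTId when the call has no AST association
    };

    struct MiniAssembler {
      int pc = 0;
      std::vector<RelocEntry> reloc;
      // Models "__ call(ic, mode, ast_id)": remember the id with the call site.
      void CallWithId(unsigned ast_id) {
        reloc.push_back({pc, ast_id});
        pc += 5;  // pretend every call occupies 5 bytes
      }
    };

    int main() {
      MiniAssembler masm;
      masm.CallWithId(17);        // e.g. a load IC emitted for AST node 17
      masm.CallWithId(kNoASTId);  // a call with nothing to record
      for (const RelocEntry& e : masm.reloc)
        std::printf("call at %d, ast id %u\n", e.pc_offset, e.ast_id);
      return 0;
    }
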
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 5ca56ac..5ed89b5 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -381,11 +381,6 @@
 }
 
 
-// The offset from the inlined patch site to the start of the inlined
-// load instruction.
-const int LoadIC::kOffsetToLoadInstruction = 20;
-
-
 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax    : receiver
@@ -1297,130 +1292,6 @@
 }
 
 
-bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
-  if (V8::UseCrankshaft()) return false;
-
-  // The address of the instruction following the call.
-  Address test_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-  // If the instruction following the call is not a test rax, nothing
-  // was inlined.
-  if (*test_instruction_address != Assembler::kTestEaxByte) return false;
-
-  Address delta_address = test_instruction_address + 1;
-  // The delta to the start of the map check instruction.
-  int delta = *reinterpret_cast<int*>(delta_address);
-
-  // The map address is the last 8 bytes of the 10-byte
-  // immediate move instruction, so we add 2 to get the
-  // offset to the last 8 bytes.
-  Address map_address = test_instruction_address + delta + 2;
-  *(reinterpret_cast<Object**>(map_address)) = map;
-
-  // The offset is in the 32-bit displacement of a seven byte
-  // memory-to-register move instruction (REX.W 0x88 ModR/M disp32),
-  // so we add 3 to get the offset of the displacement.
-  Address offset_address =
-      test_instruction_address + delta + kOffsetToLoadInstruction + 3;
-  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
-  return true;
-}
-
-
-bool LoadIC::PatchInlinedContextualLoad(Address address,
-                                        Object* map,
-                                        Object* cell,
-                                        bool is_dont_delete) {
-  // TODO(<bug#>): implement this.
-  return false;
-}
-
-
-bool StoreIC::PatchInlinedStore(Address address, Object* map, int offset) {
-  if (V8::UseCrankshaft()) return false;
-
-  // The address of the instruction following the call.
-  Address test_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-
-  // If the instruction following the call is not a test rax, nothing
-  // was inlined.
-  if (*test_instruction_address != Assembler::kTestEaxByte) return false;
-
-  // Extract the encoded deltas from the test rax instruction.
-  Address encoded_offsets_address = test_instruction_address + 1;
-  int encoded_offsets = *reinterpret_cast<int*>(encoded_offsets_address);
-  int delta_to_map_check = -(encoded_offsets & 0xFFFF);
-  int delta_to_record_write = encoded_offsets >> 16;
-
-  // Patch the map to check. The map address is the last 8 bytes of
-  // the 10-byte immediate move instruction.
-  Address map_check_address = test_instruction_address + delta_to_map_check;
-  Address map_address = map_check_address + 2;
-  *(reinterpret_cast<Object**>(map_address)) = map;
-
-  // Patch the offset in the store instruction. The offset is in the
-  // last 4 bytes of a 7 byte register-to-memory move instruction.
-  Address offset_address =
-      map_check_address + StoreIC::kOffsetToStoreInstruction + 3;
-  // The offset should have initial value (kMaxInt - 1), cleared value
-  // (-1) or we should be clearing the inlined version.
-  ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt - 1 ||
-         *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == HEAP->null_value()));
-  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
-
-  // Patch the offset in the write-barrier code. The offset is the
-  // last 4 bytes of a 7 byte lea instruction.
-  offset_address = map_check_address + delta_to_record_write + 3;
-  // The offset should have initial value (kMaxInt), cleared value
-  // (-1) or we should be clearing the inlined version.
-  ASSERT(*reinterpret_cast<int*>(offset_address) == kMaxInt ||
-         *reinterpret_cast<int*>(offset_address) == -1 ||
-         (offset == 0 && map == HEAP->null_value()));
-  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
-
-  return true;
-}
-
-
-static bool PatchInlinedMapCheck(Address address, Object* map) {
-  if (V8::UseCrankshaft()) return false;
-
-  // Arguments are address of start of call sequence that called
-  // the IC,
-  Address test_instruction_address =
-      address + Assembler::kCallTargetAddressOffset;
-  // The keyed load has a fast inlined case if the IC call instruction
-  // is immediately followed by a test instruction.
-  if (*test_instruction_address != Assembler::kTestEaxByte) return false;
-
-  // Fetch the offset from the test instruction to the map compare
-  // instructions (starting with the 64-bit immediate mov of the map
-  // address). This offset is stored in the last 4 bytes of the 5
-  // byte test instruction.
-  Address delta_address = test_instruction_address + 1;
-  int delta = *reinterpret_cast<int*>(delta_address);
-  // Compute the map address.  The map address is in the last 8 bytes
-  // of the 10-byte immediate mov instruction (incl. REX prefix), so we add 2
-  // to the offset to get the map address.
-  Address map_address = test_instruction_address + delta + 2;
-  // Patch the map check.
-  *(reinterpret_cast<Object**>(map_address)) = map;
-  return true;
-}
-
-
-bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
-  return PatchInlinedMapCheck(address, map);
-}
-
-
-bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
-  return PatchInlinedMapCheck(address, map);
-}
-
-
 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax    : key
@@ -1503,11 +1374,6 @@
 }
 
 
-// The offset from the inlined patch site to the start of the inlined
-// store instruction.
-const int StoreIC::kOffsetToStoreInstruction = 20;
-
-
 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax    : value
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 202e7a2..c242874 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -91,7 +91,7 @@
 
 void LCodeGen::FinishCode(Handle<Code> code) {
   ASSERT(is_done());
-  code->set_stack_slots(StackSlotCount());
+  code->set_stack_slots(GetStackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
   PopulateDeoptimizationData(code);
   Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
@@ -146,7 +146,7 @@
   __ push(rdi);  // Callee's JS function.
 
   // Reserve space for the stack slots needed by the code.
-  int slots = StackSlotCount();
+  int slots = GetStackSlotCount();
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ Set(rax, slots);
@@ -290,7 +290,7 @@
   while (byte_count-- > 0) {
     __ int3();
   }
-  safepoints_.Emit(masm(), StackSlotCount());
+  safepoints_.Emit(masm(), GetStackSlotCount());
   return !is_aborted();
 }
 
@@ -418,7 +418,7 @@
     translation->StoreDoubleStackSlot(op->index());
   } else if (op->IsArgument()) {
     ASSERT(is_tagged);
-    int src_index = StackSlotCount() + op->index();
+    int src_index = GetStackSlotCount() + op->index();
     translation->StoreStackSlot(src_index);
   } else if (op->IsRegister()) {
     Register reg = ToRegister(op);
@@ -1111,7 +1111,7 @@
   // Use xor to produce +0.0 in a fast and compact way, but avoid to
   // do so if the constant is -0.0.
   if (int_val == 0) {
-    __ xorpd(res, res);
+    __ xorps(res, res);
   } else {
     Register tmp = ToRegister(instr->TempAt(0));
     __ Set(tmp, int_val);
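
The xorpd-to-xorps switches in this file only affect instruction size, not behaviour: both forms XOR all 128 bits of the register, so XORing a register with itself yields +0.0 either way, and xorps encodes one byte shorter because it has no 0x66 operand-size prefix. A small illustration with SSE intrinsics (my own sketch, not code from this patch) showing the two forms agree:

    #include <emmintrin.h>  // SSE2 intrinsics (includes the SSE ones)
    #include <cstdio>

    int main() {
      __m128d x = _mm_set1_pd(-1.25);
      // Zero the register via the packed-double XOR and via the packed-single XOR.
      __m128d zero_pd = _mm_xor_pd(x, x);
      __m128d zero_ps =
          _mm_castps_pd(_mm_xor_ps(_mm_castpd_ps(x), _mm_castpd_ps(x)));
      double a, b;
      _mm_store_sd(&a, zero_pd);
      _mm_store_sd(&b, zero_ps);
      std::printf("%g %g\n", a, b);  // prints "0 0"
      return 0;
    }
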
@@ -1223,12 +1223,12 @@
       break;
     case Token::MOD:
       __ PrepareCallCFunction(2);
-      __ movsd(xmm0, left);
+      __ movaps(xmm0, left);
       ASSERT(right.is(xmm1));
       __ CallCFunction(
           ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
       __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-      __ movsd(result, xmm0);
+      __ movaps(result, xmm0);
       break;
     default:
       UNREACHABLE();
@@ -1287,7 +1287,7 @@
     EmitBranch(true_block, false_block, not_zero);
   } else if (r.IsDouble()) {
     XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
-    __ xorpd(xmm0, xmm0);
+    __ xorps(xmm0, xmm0);
     __ ucomisd(reg, xmm0);
     EmitBranch(true_block, false_block, not_equal);
   } else {
@@ -1322,7 +1322,7 @@
 
       // HeapNumber => false iff +0, -0, or NaN. These three cases set the
       // zero flag when compared to zero using ucomisd.
-      __ xorpd(xmm0, xmm0);
+      __ xorps(xmm0, xmm0);
       __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
       __ j(zero, false_label);
       __ jmp(true_label);
@@ -2058,7 +2058,7 @@
   }
   __ movq(rsp, rbp);
   __ pop(rbp);
-  __ Ret((ParameterCount() + 1) * kPointerSize, rcx);
+  __ Ret((GetParameterCount() + 1) * kPointerSize, rcx);
 }
 
 
@@ -2507,25 +2507,19 @@
                                          env->deoptimization_index());
   v8::internal::ParameterCount actual(rax);
   __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
+  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
 }
 
 
 void LCodeGen::DoPushArgument(LPushArgument* instr) {
   LOperand* argument = instr->InputAt(0);
-  if (argument->IsConstantOperand()) {
-    EmitPushConstantOperand(argument);
-  } else if (argument->IsRegister()) {
-    __ push(ToRegister(argument));
-  } else {
-    ASSERT(!argument->IsDoubleRegister());
-    __ push(ToOperand(argument));
-  }
+  EmitPushTaggedOperand(argument);
 }
 
 
 void LCodeGen::DoContext(LContext* instr) {
   Register result = ToRegister(instr->result());
-  __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset));
+  __ movq(result, rsi);
 }
 
 
@@ -2677,7 +2671,7 @@
   if (r.IsDouble()) {
     XMMRegister scratch = xmm0;
     XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
-    __ xorpd(scratch, scratch);
+    __ xorps(scratch, scratch);
     __ subsd(scratch, input_reg);
     __ andpd(input_reg, scratch);
   } else if (r.IsInteger32()) {
@@ -2688,7 +2682,9 @@
     Register input_reg = ToRegister(instr->InputAt(0));
     // Smi check.
     __ JumpIfNotSmi(input_reg, deferred->entry());
+    __ SmiToInteger32(input_reg, input_reg);
     EmitIntegerMathAbs(instr);
+    __ Integer32ToSmi(input_reg, input_reg);
     __ bind(deferred->exit());
   }
 }
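
The fix above untags the smi before the integer abs and retags afterwards, since EmitIntegerMathAbs operates on a plain 32-bit value. On this x64 port a smi keeps its 32-bit payload in the upper half of the 64-bit word, so tagging and untagging are just shifts by 32; a scalar sketch of that round trip (illustrative only, under that assumption):

    #include <cstdint>
    #include <cstdlib>
    #include <cstdio>

    // Models the x64 smi encoding: payload in the upper 32 bits, tag bits clear.
    static int64_t Integer32ToSmi(int32_t value) {
      return static_cast<int64_t>(value) * (int64_t{1} << 32);
    }
    static int32_t SmiToInteger32(int64_t smi) {
      return static_cast<int32_t>(smi / (int64_t{1} << 32));
    }

    int main() {
      int64_t smi = Integer32ToSmi(-7);
      int32_t untagged = SmiToInteger32(smi);  // SmiToInteger32(input_reg, input_reg)
      int32_t absolute = std::abs(untagged);   // EmitIntegerMathAbs on the payload
      int64_t retagged = Integer32ToSmi(absolute);
      std::printf("%d\n", SmiToInteger32(retagged));  // prints 7
      return 0;
    }
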
@@ -2698,21 +2694,36 @@
   XMMRegister xmm_scratch = xmm0;
   Register output_reg = ToRegister(instr->result());
   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
-  __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
-  __ ucomisd(input_reg, xmm_scratch);
 
-  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
-    DeoptimizeIf(below_equal, instr->environment());
+  if (CpuFeatures::IsSupported(SSE4_1)) {
+    CpuFeatures::Scope scope(SSE4_1);
+    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+      // Deoptimize if minus zero.
+      __ movq(output_reg, input_reg);
+      __ subq(output_reg, Immediate(1));
+      DeoptimizeIf(overflow, instr->environment());
+    }
+    __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
+    __ cvttsd2si(output_reg, xmm_scratch);
+    __ cmpl(output_reg, Immediate(0x80000000));
+    DeoptimizeIf(equal, instr->environment());
   } else {
-    DeoptimizeIf(below, instr->environment());
+    __ xorps(xmm_scratch, xmm_scratch);  // Zero the register.
+    __ ucomisd(input_reg, xmm_scratch);
+
+    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+      DeoptimizeIf(below_equal, instr->environment());
+    } else {
+      DeoptimizeIf(below, instr->environment());
+    }
+
+    // Use truncating instruction (OK because input is positive).
+    __ cvttsd2si(output_reg, input_reg);
+
+    // Overflow is signalled with minint.
+    __ cmpl(output_reg, Immediate(0x80000000));
+    DeoptimizeIf(equal, instr->environment());
   }
-
-  // Use truncating instruction (OK because input is positive).
-  __ cvttsd2si(output_reg, input_reg);
-
-  // Overflow is signalled with minint.
-  __ cmpl(output_reg, Immediate(0x80000000));
-  DeoptimizeIf(equal, instr->environment());
 }
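
In the SSE4.1 branch, roundsd with kRoundDown computes the floor directly and cvttsd2si reports an out-of-range result (or NaN) by producing 0x80000000, which the code treats as a deopt; the subq #1/overflow test at the top fires only for the bit pattern of -0. A scalar model of the main path using the matching compiler intrinsics (a sketch that assumes an SSE4.1-capable build, e.g. -msse4.1, and that leaves the -0 deopt out):

    #include <smmintrin.h>  // SSE4.1: _mm_floor_sd
    #include <cstdint>
    #include <cstdio>

    // Returns false where the generated code would deoptimize.
    static bool FloorToInt32(double input, int* out) {
      __m128d x = _mm_set_sd(input);
      __m128d floored = _mm_floor_sd(x, x);   // roundsd xmm, xmm, kRoundDown
      int result = _mm_cvttsd_si32(floored);  // cvttsd2si
      if (result == INT32_MIN) return false;  // the 0x80000000 sentinel
      *out = result;
      return true;
    }

    int main() {
      int r = 0;
      bool ok = FloorToInt32(3.7, &r);
      std::printf("%d %d\n", ok, r);                // prints "1 3"
      std::printf("%d\n", FloorToInt32(1e12, &r));  // prints "0" (would deopt)
      return 0;
    }
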
 
 
@@ -2721,33 +2732,44 @@
   Register output_reg = ToRegister(instr->result());
   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
 
+  Label done;
   // xmm_scratch = 0.5
   __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
   __ movq(xmm_scratch, kScratchRegister);
-
+  NearLabel below_half;
+  __ ucomisd(xmm_scratch, input_reg);
+  __ j(above, &below_half);  // If input_reg is NaN, this doesn't jump.
   // input = input + 0.5
+  // This addition might give a result that isn't correct for
+  // rounding, due to loss of precision, but only for a number that's
+  // so big that the conversion below will overflow anyway.
   __ addsd(input_reg, xmm_scratch);
-
-  // We need to return -0 for the input range [-0.5, 0[, otherwise
-  // compute Math.floor(value + 0.5).
-  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
-    __ ucomisd(input_reg, xmm_scratch);
-    DeoptimizeIf(below_equal, instr->environment());
-  } else {
-    // If we don't need to bailout on -0, we check only bailout
-    // on negative inputs.
-    __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
-    __ ucomisd(input_reg, xmm_scratch);
-    DeoptimizeIf(below, instr->environment());
-  }
-
-  // Compute Math.floor(value + 0.5).
+  // Compute Math.floor(input).
   // Use truncating instruction (OK because input is positive).
   __ cvttsd2si(output_reg, input_reg);
-
   // Overflow is signalled with minint.
   __ cmpl(output_reg, Immediate(0x80000000));
   DeoptimizeIf(equal, instr->environment());
+  __ jmp(&done);
+
+  __ bind(&below_half);
+  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+    // Bail out if negative (including -0).
+    __ movq(output_reg, input_reg);
+    __ testq(output_reg, output_reg);
+    DeoptimizeIf(negative, instr->environment());
+  } else {
+    // Bail out if below -0.5, otherwise round to (positive) zero, even
+    // if negative.
+    // xmm_scratch = -0.5
+    __ movq(kScratchRegister, V8_INT64_C(0xBFE0000000000000), RelocInfo::NONE);
+    __ movq(xmm_scratch, kScratchRegister);
+    __ ucomisd(input_reg, xmm_scratch);
+    DeoptimizeIf(below, instr->environment());
+  }
+  __ xorl(output_reg, output_reg);
+
+  __ bind(&done);
 }
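
The rewritten round splits on 0.5: inputs at or above 0.5 (and NaN, which falls through the ucomisd/j(above) pair) take the floor(input + 0.5) path with the 0x80000000 overflow deopt, while smaller inputs produce 0 after bailing out either on any negative input (including -0) when minus zero matters, or only below -0.5 otherwise. A scalar model of that control flow (a sketch of the intended semantics, not of the generated instructions):

    #include <cmath>
    #include <cstdio>

    // Returns false where the generated code would deoptimize.
    static bool RoundToInt32(double input, bool bailout_on_minus_zero, int* out) {
      if (!(input < 0.5)) {  // input >= 0.5, or NaN
        double floored = std::floor(input + 0.5);
        if (!(floored <= 2147483647.0)) return false;  // int32 overflow or NaN
        *out = static_cast<int>(floored);
        return true;
      }
      if (bailout_on_minus_zero) {
        if (std::signbit(input)) return false;  // negative, including -0
      } else if (input < -0.5) {
        return false;
      }
      *out = 0;  // round toward (positive) zero
      return true;
    }

    int main() {
      int r = 0;
      bool ok = RoundToInt32(2.5, false, &r);
      std::printf("%d %d\n", ok, r);                        // prints "1 3"
      ok = RoundToInt32(-0.25, false, &r);
      std::printf("%d %d\n", ok, r);                        // prints "1 0"
      std::printf("%d\n", RoundToInt32(-0.75, false, &r));  // prints "0" (would deopt)
      return 0;
    }
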
 
 
@@ -2762,7 +2784,7 @@
   XMMRegister xmm_scratch = xmm0;
   XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
   ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
-  __ xorpd(xmm_scratch, xmm_scratch);
+  __ xorps(xmm_scratch, xmm_scratch);
   __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
   __ sqrtsd(input_reg, input_reg);
 }
@@ -2778,7 +2800,7 @@
   if (exponent_type.IsDouble()) {
     __ PrepareCallCFunction(2);
     // Move arguments to correct registers
-    __ movsd(xmm0, left_reg);
+    __ movaps(xmm0, left_reg);
     ASSERT(ToDoubleRegister(right).is(xmm1));
     __ CallCFunction(
         ExternalReference::power_double_double_function(isolate()), 2);
@@ -2786,7 +2808,7 @@
     __ PrepareCallCFunction(2);
     // Move arguments to correct registers: xmm0 and edi (not rdi).
     // On Windows, the registers are xmm0 and edx.
-    __ movsd(xmm0, left_reg);
+    __ movaps(xmm0, left_reg);
 #ifdef _WIN64
     ASSERT(ToRegister(right).is(rdx));
 #else
@@ -2812,13 +2834,13 @@
     __ bind(&call);
     __ PrepareCallCFunction(2);
     // Move arguments to correct registers xmm0 and xmm1.
-    __ movsd(xmm0, left_reg);
+    __ movaps(xmm0, left_reg);
     // Right argument is already in xmm1.
     __ CallCFunction(
         ExternalReference::power_double_double_function(isolate()), 2);
   }
   // Return value is in xmm0.
-  __ movsd(result_reg, xmm0);
+  __ movaps(result_reg, xmm0);
   // Restore context register.
   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
 }
@@ -2881,6 +2903,21 @@
 }
 
 
+void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
+  ASSERT(ToRegister(instr->function()).is(rdi));
+  ASSERT(instr->HasPointerMap());
+  ASSERT(instr->HasDeoptimizationEnvironment());
+  LPointerMap* pointers = instr->pointer_map();
+  LEnvironment* env = instr->deoptimization_environment();
+  RecordPosition(pointers->position());
+  RegisterEnvironmentForDeoptimization(env);
+  SafepointGenerator generator(this, pointers, env->deoptimization_index());
+  ParameterCount count(instr->arity());
+  __ InvokeFunction(rdi, count, CALL_FUNCTION, &generator);
+  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+}
+
+
 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->key()).is(rcx));
   ASSERT(ToRegister(instr->result()).is(rax));
@@ -3087,6 +3124,14 @@
 }
 
 
+void LCodeGen::DoStringAdd(LStringAdd* instr) {
+  EmitPushTaggedOperand(instr->left());
+  EmitPushTaggedOperand(instr->right());
+  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+}
+
+
 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
   class DeferredStringCharCodeAt: public LDeferredCode {
    public:
@@ -3377,7 +3422,7 @@
   DeoptimizeIf(not_equal, env);
 
   // Convert undefined to NaN. Compute NaN as 0/0.
-  __ xorpd(result_reg, result_reg);
+  __ xorps(result_reg, result_reg);
   __ divsd(result_reg, result_reg);
   __ jmp(&done);
 
@@ -3746,14 +3791,7 @@
 
 void LCodeGen::DoTypeof(LTypeof* instr) {
   LOperand* input = instr->InputAt(0);
-  if (input->IsConstantOperand()) {
-    __ Push(ToHandle(LConstantOperand::cast(input)));
-  } else if (input->IsRegister()) {
-    __ push(ToRegister(input));
-  } else {
-    ASSERT(input->IsStackSlot());
-    __ push(ToOperand(input));
-  }
+  EmitPushTaggedOperand(input);
   CallRuntime(Runtime::kTypeof, 1, instr);
 }
 
@@ -3781,19 +3819,14 @@
 }
 
 
-void LCodeGen::EmitPushConstantOperand(LOperand* operand) {
-  ASSERT(operand->IsConstantOperand());
-  LConstantOperand* const_op = LConstantOperand::cast(operand);
-  Handle<Object> literal = chunk_->LookupLiteral(const_op);
-  Representation r = chunk_->LookupLiteralRepresentation(const_op);
-  if (r.IsInteger32()) {
-    ASSERT(literal->IsNumber());
-    __ push(Immediate(static_cast<int32_t>(literal->Number())));
-  } else if (r.IsDouble()) {
-    Abort("unsupported double immediate");
+void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
+  ASSERT(!operand->IsDoubleRegister());
+  if (operand->IsConstantOperand()) {
+    __ Push(ToHandle(LConstantOperand::cast(operand)));
+  } else if (operand->IsRegister()) {
+    __ push(ToRegister(operand));
   } else {
-    ASSERT(r.IsTagged());
-    __ Push(literal);
+    __ push(ToOperand(operand));
   }
 }
 
@@ -3939,20 +3972,8 @@
 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
   LOperand* obj = instr->object();
   LOperand* key = instr->key();
-  // Push object.
-  if (obj->IsRegister()) {
-    __ push(ToRegister(obj));
-  } else {
-    __ push(ToOperand(obj));
-  }
-  // Push key.
-  if (key->IsConstantOperand()) {
-    EmitPushConstantOperand(key);
-  } else if (key->IsRegister()) {
-    __ push(ToRegister(key));
-  } else {
-    __ push(ToOperand(key));
-  }
+  EmitPushTaggedOperand(obj);
+  EmitPushTaggedOperand(key);
   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
   LPointerMap* pointers = instr->pointer_map();
   LEnvironment* env = instr->deoptimization_environment();
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index 34277f6..96e0a0f 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -141,8 +141,8 @@
                        Register input,
                        Register temporary);
 
-  int StackSlotCount() const { return chunk()->spill_slot_count(); }
-  int ParameterCount() const { return scope()->num_parameters(); }
+  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
+  int GetParameterCount() const { return scope()->num_parameters(); }
 
   void Abort(const char* format, ...);
   void Comment(const char* format, ...);
@@ -268,8 +268,9 @@
                      Handle<Map> type,
                      Handle<String> name);
 
-  // Emits code for pushing a constant operand.
-  void EmitPushConstantOperand(LOperand* operand);
+  // Emits code for pushing either a tagged constant, a (non-double)
+  // register, or a stack slot operand.
+  void EmitPushTaggedOperand(LOperand* operand);
 
   struct JumpTableEntry {
     explicit inline JumpTableEntry(Address entry)
diff --git a/src/x64/lithium-gap-resolver-x64.cc b/src/x64/lithium-gap-resolver-x64.cc
index cedd025..c3c617c 100644
--- a/src/x64/lithium-gap-resolver-x64.cc
+++ b/src/x64/lithium-gap-resolver-x64.cc
@@ -214,7 +214,7 @@
   } else if (source->IsDoubleRegister()) {
     XMMRegister src = cgen_->ToDoubleRegister(source);
     if (destination->IsDoubleRegister()) {
-      __ movsd(cgen_->ToDoubleRegister(destination), src);
+      __ movaps(cgen_->ToDoubleRegister(destination), src);
     } else {
       ASSERT(destination->IsDoubleStackSlot());
       __ movsd(cgen_->ToOperand(destination), src);
@@ -273,9 +273,9 @@
     // Swap two double registers.
     XMMRegister source_reg = cgen_->ToDoubleRegister(source);
     XMMRegister destination_reg = cgen_->ToDoubleRegister(destination);
-    __ movsd(xmm0, source_reg);
-    __ movsd(source_reg, destination_reg);
-    __ movsd(destination_reg, xmm0);
+    __ movaps(xmm0, source_reg);
+    __ movaps(source_reg, destination_reg);
+    __ movaps(destination_reg, xmm0);
 
   } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) {
     // Swap a double register and a double stack slot.
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 07ca3a5..620bbc9 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -71,22 +71,21 @@
 
 #ifdef DEBUG
 void LInstruction::VerifyCall() {
-  // Call instructions can use only fixed registers as
-  // temporaries and outputs because all registers
-  // are blocked by the calling convention.
-  // Inputs must use a fixed register.
+  // Call instructions can use only fixed registers as temporaries and
+  // outputs because all registers are blocked by the calling convention.
+  // Input operands must use a fixed register, a use-at-start policy, or
+  // a non-register policy.
   ASSERT(Output() == NULL ||
          LUnallocated::cast(Output())->HasFixedPolicy() ||
          !LUnallocated::cast(Output())->HasRegisterPolicy());
   for (UseIterator it(this); it.HasNext(); it.Advance()) {
-    LOperand* operand = it.Next();
-    ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
-           !LUnallocated::cast(operand)->HasRegisterPolicy());
+    LUnallocated* operand = LUnallocated::cast(it.Next());
+    ASSERT(operand->HasFixedPolicy() ||
+           operand->IsUsedAtStart());
   }
   for (TempIterator it(this); it.HasNext(); it.Advance()) {
-    LOperand* operand = it.Next();
-    ASSERT(LUnallocated::cast(operand)->HasFixedPolicy() ||
-           !LUnallocated::cast(operand)->HasRegisterPolicy());
+    LUnallocated* operand = LUnallocated::cast(it.Next());
+    ASSERT(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
   }
 }
 #endif
@@ -303,6 +302,13 @@
 }
 
 
+void LInvokeFunction::PrintDataTo(StringStream* stream) {
+  stream->Add("= ");
+  InputAt(0)->PrintTo(stream);
+  stream->Add(" #%d / ", arity());
+}
+
+
 void LCallKeyed::PrintDataTo(StringStream* stream) {
   stream->Add("[rcx] #%d / ", arity());
 }
@@ -1211,6 +1217,14 @@
 }
 
 
+LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
+  LOperand* function = UseFixed(instr->function(), rdi);
+  argument_count_ -= instr->argument_count();
+  LInvokeFunction* result = new LInvokeFunction(function);
+  return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
+}
+
+
 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
   BuiltinFunctionId op = instr->op();
   if (op == kMathLog || op == kMathSin || op == kMathCos) {
@@ -1941,6 +1955,13 @@
 }
 
 
+LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
+  LOperand* left = UseOrConstantAtStart(instr->left());
+  LOperand* right = UseOrConstantAtStart(instr->right());
+  return MarkAsCall(DefineFixed(new LStringAdd(left, right), rax), instr);
+}
+
+
 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
   LOperand* string = UseRegister(instr->string());
   LOperand* index = UseRegisterOrConstant(instr->index());
@@ -1984,7 +2005,8 @@
 
 LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
   LDeleteProperty* result =
-      new LDeleteProperty(Use(instr->object()), UseOrConstant(instr->key()));
+      new LDeleteProperty(UseAtStart(instr->object()),
+                          UseOrConstantAtStart(instr->key()));
   return MarkAsCall(DefineFixed(result, rax), instr);
 }
 
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index 15bb894..74f4820 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -98,14 +98,15 @@
   V(GlobalObject)                               \
   V(GlobalReceiver)                             \
   V(Goto)                                       \
-  V(HasInstanceType)                            \
-  V(HasInstanceTypeAndBranch)                   \
   V(HasCachedArrayIndex)                        \
   V(HasCachedArrayIndexAndBranch)               \
+  V(HasInstanceType)                            \
+  V(HasInstanceTypeAndBranch)                   \
   V(InstanceOf)                                 \
   V(InstanceOfAndBranch)                        \
   V(InstanceOfKnownGlobal)                      \
   V(Integer32ToDouble)                          \
+  V(InvokeFunction)                             \
   V(IsNull)                                     \
   V(IsNullAndBranch)                            \
   V(IsObject)                                   \
@@ -152,6 +153,7 @@
   V(StoreKeyedSpecializedArrayElement)          \
   V(StoreNamedField)                            \
   V(StoreNamedGeneric)                          \
+  V(StringAdd)                                  \
   V(StringCharCodeAt)                           \
   V(StringCharFromCode)                         \
   V(StringLength)                               \
@@ -1393,6 +1395,23 @@
 };
 
 
+class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LInvokeFunction(LOperand* function) {
+    inputs_[0] = function;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(InvokeFunction, "invoke-function")
+  DECLARE_HYDROGEN_ACCESSOR(InvokeFunction)
+
+  LOperand* function() { return inputs_[0]; }
+
+  virtual void PrintDataTo(StringStream* stream);
+
+  int arity() const { return hydrogen()->argument_count() - 1; }
+};
+
+
 class LCallKeyed: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LCallKeyed(LOperand* key) {
@@ -1684,6 +1703,21 @@
 };
 
 
+class LStringAdd: public LTemplateInstruction<1, 2, 0> {
+ public:
+  LStringAdd(LOperand* left, LOperand* right) {
+    inputs_[0] = left;
+    inputs_[1] = right;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(StringAdd, "string-add")
+  DECLARE_HYDROGEN_ACCESSOR(StringAdd)
+
+  LOperand* left() { return inputs_[0]; }
+  LOperand* right() { return inputs_[1]; }
+};
+
+
 class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
  public:
   LStringCharCodeAt(LOperand* string, LOperand* index) {
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 7f027f7..24f2fef 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -425,9 +425,9 @@
 }
 
 
-void MacroAssembler::CallStub(CodeStub* stub) {
+void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
   ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
-  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
+  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
 }
 
 
@@ -650,6 +650,7 @@
   Label leave_exit_frame;
   Label write_back;
 
+  Factory* factory = isolate()->factory();
   ExternalReference next_address =
       ExternalReference::handle_scope_next_address();
   const int kNextOffset = 0;
@@ -697,7 +698,7 @@
 
   // Check if the function scheduled an exception.
   movq(rsi, scheduled_exception_address);
-  Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
+  Cmp(Operand(rsi, 0), factory->the_hole_value());
   j(not_equal, &promote_scheduled_exception);
 
   LeaveApiExitFrame();
@@ -712,7 +713,7 @@
 
   bind(&empty_result);
   // It was zero; the result is undefined.
-  Move(rax, FACTORY->undefined_value());
+  Move(rax, factory->undefined_value());
   jmp(&prologue);
 
   // HandleScope limit has changed. Delete allocated extensions.
@@ -1247,12 +1248,17 @@
                             Register src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible.
-  ASSERT(!dst.is(src2));
   if (!dst.is(src1)) {
-    movq(dst, src1);
+    if (emit_debug_code()) {
+      movq(kScratchRegister, src1);
+      addq(kScratchRegister, src2);
+      Check(no_overflow, "Smi addition overflow");
+    }
+    lea(dst, Operand(src1, src2, times_1, 0));
+  } else {
+    addq(dst, src2);
+    Assert(no_overflow, "Smi addition overflow");
   }
-  addq(dst, src2);
-  Assert(no_overflow, "Smi addition overflow");
 }
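
The reworked SmiAdd relies on lea when dst differs from src1: lea produces src1 + src2 in one instruction without modifying either source, so dst may now alias src2 and the old ASSERT(!dst.is(src2)) can go away; and because lea does not set flags, the overflow assertion in that case becomes a debug-only recomputation through kScratchRegister. A minimal illustration of the three-operand add (assumes GCC/Clang-style inline asm on x86-64; illustrative only):

    #include <cstdint>
    #include <cstdio>

    // dst = src1 + src2 via lea: both sources and EFLAGS are left untouched.
    static int64_t LeaAdd(int64_t src1, int64_t src2) {
      int64_t dst;
      asm("leaq (%1,%2,1), %0" : "=r"(dst) : "r"(src1), "r"(src2));
      return dst;
    }

    int main() {
      std::printf("%lld\n", static_cast<long long>(LeaAdd(40, 2)));  // prints "42"
      return 0;
    }
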
 
 
@@ -1604,12 +1610,14 @@
 }
 
 
-void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
+void MacroAssembler::Call(Handle<Code> code_object,
+                          RelocInfo::Mode rmode,
+                          unsigned ast_id) {
 #ifdef DEBUG
   int end_position = pc_offset() + CallSize(code_object);
 #endif
   ASSERT(RelocInfo::IsCodeTarget(rmode));
-  call(code_object, rmode);
+  call(code_object, rmode, ast_id);
 #ifdef DEBUG
   CHECK_EQ(end_position, pc_offset());
 #endif
@@ -1895,7 +1903,7 @@
   Condition is_smi = CheckSmi(object);
   j(is_smi, &ok);
   Cmp(FieldOperand(object, HeapObject::kMapOffset),
-      FACTORY->heap_number_map());
+      isolate()->factory()->heap_number_map());
   Assert(equal, "Operand not a number");
   bind(&ok);
 }
@@ -2152,7 +2160,7 @@
   push(kScratchRegister);
   if (emit_debug_code()) {
     movq(kScratchRegister,
-         FACTORY->undefined_value(),
+         isolate()->factory()->undefined_value(),
          RelocInfo::EMBEDDED_OBJECT);
     cmpq(Operand(rsp, 0), kScratchRegister);
     Check(not_equal, "code object not properly patched");
@@ -2320,7 +2328,7 @@
   // Check the context is a global context.
   if (emit_debug_code()) {
     Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
-        FACTORY->global_context_map());
+        isolate()->factory()->global_context_map());
     Check(equal, "JSGlobalObject::global_context should be a global context.");
   }
 
@@ -2822,7 +2830,7 @@
   movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   if (emit_debug_code()) {
     Label ok, fail;
-    CheckMap(map, FACTORY->meta_map(), &fail, false);
+    CheckMap(map, isolate()->factory()->meta_map(), &fail, false);
     jmp(&ok);
     bind(&fail);
     Abort("Global functions must have initial map");
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 4c17720..8499edf 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -692,7 +692,9 @@
 
   void Call(Address destination, RelocInfo::Mode rmode);
   void Call(ExternalReference ext);
-  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
+  void Call(Handle<Code> code_object,
+            RelocInfo::Mode rmode,
+            unsigned ast_id = kNoASTId);
 
   // The size of the code generated for different call instructions.
   int CallSize(Address destination, RelocInfo::Mode rmode) {
@@ -932,7 +934,7 @@
   // Runtime calls
 
   // Call a code stub.
-  void CallStub(CodeStub* stub);
+  void CallStub(CodeStub* stub, unsigned ast_id = kNoASTId);
 
   // Call a code stub and return the code object called.  Try to generate
   // the code if necessary.  Do not perform a GC but instead return a retry