Improved handling of relocation information to enable more peephole optimizations.
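
The IA-32 assembler change below, for instance, now folds a push of an immediate followed by a pop of eax into a single mov (or an xor when the immediate is zero). A hypothetical JavaScript fragment whose generated code can contain such a push/pop pair:

    // Hypothetical example: returning a small constant tends to materialize
    // an immediate in a register, which the peephole can now emit as one mov.
    function answer() {
      return 42;
    }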

Optimized switch statements where all labels are constant small integers.
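
An illustrative example (not taken from this change) of the kind of switch this targets; the Assembler::dd() and WriteInternalReference() helpers added below suggest such a switch can be lowered to an inline jump table:

    // Hypothetical example: every case label is a constant small integer.
    function kind(tag) {
      switch (tag) {
        case 0: return 'smi';
        case 1: return 'string';
        case 2: return 'object';
        default: return 'unknown';
      }
    }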

Optimized String.prototype.indexOf for common cases.
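
A sketch of typical call sites this is aimed at (the precise set of optimized cases is not spelled out in this change):

    // Hypothetical examples of common indexOf usage.
    'hello world'.indexOf('o');      // single-character pattern -> 4
    'hello world'.indexOf('world');  // short pattern found once -> 6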

Fixed more build issues (issue 80).

Fixed a couple of profiler issues.

Fixed a bug where the body of a function created using the Function constructor was not allowed to end with a single-line comment (issue 85).
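
For example, the following now works; previously the trailing comment presumably ran into the synthesized end of the function source and caused a syntax error:

    // The body ends with a single-line comment and no trailing newline.
    var f = new Function('a', 'return a + 1 // increment');
    f(1);  // 2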

Improved handling of object literals by canonicalizing object literal maps. This allows JSON objects with the same set of properties to share the same map, making inline caching work better for them.
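
For example, with canonicalized literal maps the two objects below can share a map, so a property access site that has seen one of them stays monomorphic for the other (a sketch; assumes the properties are added in the same order):

    // Hypothetical example: same set of property names.
    var a = {x: 1, y: 2};
    var b = {x: 3, y: 4};
    function getX(o) { return o.x; }
    getX(a);
    getX(b);  // inline cache hit: same map as a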



git-svn-id: http://v8.googlecode.com/svn/trunk@373 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/api.cc b/src/api.cc
index 79ef605..eee5a7a 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -2216,7 +2216,7 @@
 
 
 const char* v8::V8::GetVersion() {
-  return "0.3.2";
+  return "0.3.3";
 }
 
 
diff --git a/src/assembler-arm-inl.h b/src/assembler-arm-inl.h
index d949811..fbe37d7 100644
--- a/src/assembler-arm-inl.h
+++ b/src/assembler-arm-inl.h
@@ -50,42 +50,48 @@
 
 
 void RelocInfo::apply(int delta) {
-  // We do not use pc relative addressing on ARM, so there is nothing to do.
+  if (RelocInfo::IsInternalReference(rmode_)) {
+    // absolute code pointer inside code object moves with the code object.
+    int32_t* p = reinterpret_cast<int32_t*>(pc_);
+    *p += delta;  // relocate entry
+  }
+  // We do not use pc relative addressing on ARM, so there is
+  // nothing else to do.
 }
 
 
 Address RelocInfo::target_address() {
-  ASSERT(is_code_target(rmode_));
+  ASSERT(IsCodeTarget(rmode_));
   return Assembler::target_address_at(pc_);
 }
 
 
 void RelocInfo::set_target_address(Address target) {
-  ASSERT(is_code_target(rmode_));
+  ASSERT(IsCodeTarget(rmode_));
   Assembler::set_target_address_at(pc_, target);
 }
 
 
 Object* RelocInfo::target_object() {
-  ASSERT(is_code_target(rmode_) || rmode_ == embedded_object);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
 }
 
 
 Object** RelocInfo::target_object_address() {
-  ASSERT(is_code_target(rmode_) || rmode_ == embedded_object);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return reinterpret_cast<Object**>(Assembler::target_address_address_at(pc_));
 }
 
 
 void RelocInfo::set_target_object(Object* target) {
-  ASSERT(is_code_target(rmode_) || rmode_ == embedded_object);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
 }
 
 
 Address* RelocInfo::target_reference_address() {
-  ASSERT(rmode_ == external_reference);
+  ASSERT(rmode_ == EXTERNAL_REFERENCE);
   return reinterpret_cast<Address*>(pc_);
 }
 
@@ -129,7 +135,7 @@
 }
 
 
-Operand::Operand(int32_t immediate, RelocMode rmode)  {
+Operand::Operand(int32_t immediate, RelocInfo::Mode rmode)  {
   rm_ = no_reg;
   imm32_ = immediate;
   rmode_ = rmode;
@@ -139,35 +145,35 @@
 Operand::Operand(const char* s) {
   rm_ = no_reg;
   imm32_ = reinterpret_cast<int32_t>(s);
-  rmode_ = embedded_string;
+  rmode_ = RelocInfo::EMBEDDED_STRING;
 }
 
 
 Operand::Operand(const ExternalReference& f)  {
   rm_ = no_reg;
   imm32_ = reinterpret_cast<int32_t>(f.address());
-  rmode_ = external_reference;
+  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
 }
 
 
 Operand::Operand(Object** opp) {
   rm_ = no_reg;
   imm32_ = reinterpret_cast<int32_t>(opp);
-  rmode_ = no_reloc;
+  rmode_ = RelocInfo::NONE;
 }
 
 
 Operand::Operand(Context** cpp) {
   rm_ = no_reg;
   imm32_ = reinterpret_cast<int32_t>(cpp);
-  rmode_ = no_reloc;
+  rmode_ = RelocInfo::NONE;
 }
 
 
 Operand::Operand(Smi* value) {
   rm_ = no_reg;
   imm32_ =  reinterpret_cast<intptr_t>(value);
-  rmode_ = no_reloc;
+  rmode_ = RelocInfo::NONE;
 }
 
 
diff --git a/src/assembler-arm.cc b/src/assembler-arm.cc
index 12d110b..71894e5 100644
--- a/src/assembler-arm.cc
+++ b/src/assembler-arm.cc
@@ -164,11 +164,11 @@
   ASSERT(!Heap::InNewSpace(obj));
   if (obj->IsHeapObject()) {
     imm32_ = reinterpret_cast<intptr_t>(handle.location());
-    rmode_ = embedded_object;
+    rmode_ = RelocInfo::EMBEDDED_OBJECT;
   } else {
     // no relocation needed
     imm32_ =  reinterpret_cast<intptr_t>(obj);
-    rmode_ = no_reloc;
+    rmode_ = RelocInfo::NONE;
   }
 }
 
@@ -320,7 +320,7 @@
   no_const_pool_before_ = 0;
   last_const_pool_end_ = 0;
   last_bound_pos_ = 0;
-  last_position_ = kNoPosition;
+  last_position_ = RelocInfo::kNoPosition;
   last_position_is_statement_ = false;
 }
 
@@ -588,7 +588,8 @@
     // immediate
     uint32_t rotate_imm;
     uint32_t immed_8;
-    if ((x.rmode_ != no_reloc && x.rmode_ != external_reference) ||
+    if ((x.rmode_ != RelocInfo::NONE &&
+         x.rmode_ != RelocInfo::EXTERNAL_REFERENCE) ||
         !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) {
       // The immediate operand cannot be encoded as a shifter operand, so load
       // it first to register ip and change the original instruction to use ip.
@@ -1006,7 +1007,8 @@
     // immediate
     uint32_t rotate_imm;
     uint32_t immed_8;
-    if ((src.rmode_ != no_reloc && src.rmode_ != external_reference)||
+    if ((src.rmode_ != RelocInfo::NONE &&
+         src.rmode_ != RelocInfo::EXTERNAL_REFERENCE)||
         !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) {
       // immediate operand cannot be encoded, load it first to register ip
       RecordRelocInfo(src.rmode_, src.imm32_);
@@ -1353,17 +1355,17 @@
 void Assembler::RecordComment(const char* msg) {
   if (FLAG_debug_code) {
     CheckBuffer();
-    RecordRelocInfo(comment, reinterpret_cast<intptr_t>(msg));
+    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
   }
 }
 
 
 void Assembler::RecordPosition(int pos) {
-  if (pos == kNoPosition) return;
-  ASSERT(position >= 0);
+  if (pos == RelocInfo::kNoPosition) return;
+  ASSERT(pos >= 0);
   if (pos == last_position_) return;
   CheckBuffer();
-  RecordRelocInfo(position, pos);
+  RecordRelocInfo(RelocInfo::POSITION, pos);
   last_position_ = pos;
   last_position_is_statement_ = false;
 }
@@ -1372,7 +1374,7 @@
 void Assembler::RecordStatementPosition(int pos) {
   if (pos == last_position_) return;
   CheckBuffer();
-  RecordRelocInfo(statement_position, pos);
+  RecordRelocInfo(RelocInfo::STATEMENT_POSITION, pos);
   last_position_ = pos;
   last_position_is_statement_ = true;
 }
@@ -1420,17 +1422,18 @@
   // relocate pending relocation entries
   for (int i = 0; i < num_prinfo_; i++) {
     RelocInfo& rinfo = prinfo_[i];
-    ASSERT(rinfo.rmode() != comment && rinfo.rmode() != position);
+    ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
+           rinfo.rmode() != RelocInfo::POSITION);
     rinfo.set_pc(rinfo.pc() + pc_delta);
   }
 }
 
 
-void Assembler::RecordRelocInfo(RelocMode rmode, intptr_t data) {
+void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
   RelocInfo rinfo(pc_, rmode, data);  // we do not try to reuse pool constants
-  if (rmode >= comment && rmode <= statement_position) {
+  if (rmode >= RelocInfo::COMMENT && rmode <= RelocInfo::STATEMENT_POSITION) {
     // adjust code for new modes
-    ASSERT(is_comment(rmode) || is_position(rmode));
+    ASSERT(RelocInfo::IsComment(rmode) || RelocInfo::IsPosition(rmode));
     // these modes do not need an entry in the constant pool
   } else {
     ASSERT(num_prinfo_ < kMaxNumPRInfo);
@@ -1439,9 +1442,9 @@
     // instruction for which we just recorded relocation info
     BlockConstPoolBefore(pc_offset() + kInstrSize);
   }
-  if (rinfo.rmode() != no_reloc) {
+  if (rinfo.rmode() != RelocInfo::NONE) {
     // Don't record external references unless the heap will be serialized.
-    if (rmode == external_reference &&
+    if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
         !Serializer::enabled() &&
         !FLAG_debug_code) {
       return;
@@ -1520,8 +1523,9 @@
   // Emit constant pool entries
   for (int i = 0; i < num_prinfo_; i++) {
     RelocInfo& rinfo = prinfo_[i];
-    ASSERT(rinfo.rmode() != comment && rinfo.rmode() != position &&
-           rinfo.rmode() != statement_position);
+    ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
+           rinfo.rmode() != RelocInfo::POSITION &&
+           rinfo.rmode() != RelocInfo::STATEMENT_POSITION);
     Instr instr = instr_at(rinfo.pc());
     // Instruction to patch must be a ldr/str [pc, #offset]
     // P and U set, B and W clear, Rn == pc, offset12 still 0
diff --git a/src/assembler-arm.h b/src/assembler-arm.h
index b9fa81d..351ce3d 100644
--- a/src/assembler-arm.h
+++ b/src/assembler-arm.h
@@ -298,7 +298,8 @@
 class Operand BASE_EMBEDDED {
  public:
   // immediate
-  INLINE(explicit Operand(int32_t immediate, RelocMode rmode = no_reloc));
+  INLINE(explicit Operand(int32_t immediate,
+         RelocInfo::Mode rmode = RelocInfo::NONE));
   INLINE(explicit Operand(const ExternalReference& f));
   INLINE(explicit Operand(const char* s));
   INLINE(explicit Operand(Object** opp));
@@ -326,7 +327,7 @@
   ShiftOp shift_op_;
   int shift_imm_;  // valid if rm_ != no_reg && rs_ == no_reg
   int32_t imm32_;  // valid if rm_ == no_reg
-  RelocMode rmode_;
+  RelocInfo::Mode rmode_;
 
   friend class Assembler;
 };
@@ -649,6 +650,9 @@
     return last_position_is_statement_;
   }
 
+  // Temporary helper function. Used by codegen.cc.
+  int last_statement_position() const { return last_position_; }
+
  protected:
   int buffer_space() const { return reloc_info_writer.pos() - pc_; }
 
@@ -769,7 +773,7 @@
   void next(Label* L);
 
   // Record reloc info for current pc_
-  void RecordRelocInfo(RelocMode rmode, intptr_t data = 0);
+  void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
 
   // Check if is time to emit a constant pool for pending reloc info entries
   void CheckConstPool(bool force_emit, bool require_jump);
diff --git a/src/assembler-ia32-inl.h b/src/assembler-ia32-inl.h
index 66102e6..9b3567a 100644
--- a/src/assembler-ia32-inl.h
+++ b/src/assembler-ia32-inl.h
@@ -48,50 +48,54 @@
 
 // The modes possibly affected by apply must be in kApplyMask.
 void RelocInfo::apply(int delta) {
-  if (rmode_ == runtime_entry || is_code_target(rmode_)) {
+  if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
     int32_t* p = reinterpret_cast<int32_t*>(pc_);
     *p -= delta;  // relocate entry
-  } else if (rmode_ == js_return && is_call_instruction()) {
+  } else if (rmode_ == JS_RETURN && is_call_instruction()) {
     // Special handling of js_return when a break point is set (call
     // instruction has been inserted).
     int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
     *p -= delta;  // relocate entry
+  } else if (IsInternalReference(rmode_)) {
+    // absolute code pointer inside code object moves with the code object.
+    int32_t* p = reinterpret_cast<int32_t*>(pc_);
+    *p += delta;  // relocate entry
   }
 }
 
 
 Address RelocInfo::target_address() {
-  ASSERT(is_code_target(rmode_) || rmode_ == runtime_entry);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
   return Assembler::target_address_at(pc_);
 }
 
 
 void RelocInfo::set_target_address(Address target) {
-  ASSERT(is_code_target(rmode_) || rmode_ == runtime_entry);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
   Assembler::set_target_address_at(pc_, target);
 }
 
 
 Object* RelocInfo::target_object() {
-  ASSERT(is_code_target(rmode_) || rmode_ == embedded_object);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return *reinterpret_cast<Object**>(pc_);
 }
 
 
 Object** RelocInfo::target_object_address() {
-  ASSERT(is_code_target(rmode_) || rmode_ == embedded_object);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return reinterpret_cast<Object**>(pc_);
 }
 
 
 void RelocInfo::set_target_object(Object* target) {
-  ASSERT(is_code_target(rmode_) || rmode_ == embedded_object);
+  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   *reinterpret_cast<Object**>(pc_) = target;
 }
 
 
 Address* RelocInfo::target_reference_address() {
-  ASSERT(rmode_ == external_reference);
+  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
   return reinterpret_cast<Address*>(pc_);
 }
 
@@ -133,18 +137,18 @@
 
 Immediate::Immediate(int x)  {
   x_ = x;
-  rmode_ = no_reloc;
+  rmode_ = RelocInfo::NONE;
 }
 
 
 Immediate::Immediate(const ExternalReference& ext) {
   x_ = reinterpret_cast<int32_t>(ext.address());
-  rmode_ = external_reference;
+  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
 }
 
 Immediate::Immediate(const char* s) {
   x_ = reinterpret_cast<int32_t>(s);
-  rmode_ = embedded_string;
+  rmode_ = RelocInfo::EMBEDDED_STRING;
 }
 
 
@@ -154,18 +158,18 @@
   ASSERT(!Heap::InNewSpace(obj));
   if (obj->IsHeapObject()) {
     x_ = reinterpret_cast<intptr_t>(handle.location());
-    rmode_ = embedded_object;
+    rmode_ = RelocInfo::EMBEDDED_OBJECT;
   } else {
     // no relocation needed
     x_ =  reinterpret_cast<intptr_t>(obj);
-    rmode_ = no_reloc;
+    rmode_ = RelocInfo::NONE;
   }
 }
 
 
 Immediate::Immediate(Smi* value) {
   x_ = reinterpret_cast<intptr_t>(value);
-  rmode_ = no_reloc;
+  rmode_ = RelocInfo::NONE;
 }
 
 
@@ -180,7 +184,8 @@
   Object* obj = *handle;
   ASSERT(!Heap::InNewSpace(obj));
   if (obj->IsHeapObject()) {
-    emit(reinterpret_cast<intptr_t>(handle.location()), embedded_object);
+    emit(reinterpret_cast<intptr_t>(handle.location()),
+         RelocInfo::EMBEDDED_OBJECT);
   } else {
     // no relocation needed
     emit(reinterpret_cast<intptr_t>(obj));
@@ -188,14 +193,14 @@
 }
 
 
-void Assembler::emit(uint32_t x, RelocMode rmode) {
-  if (rmode != no_reloc) RecordRelocInfo(rmode);
+void Assembler::emit(uint32_t x, RelocInfo::Mode rmode) {
+  if (rmode != RelocInfo::NONE) RecordRelocInfo(rmode);
   emit(x);
 }
 
 
 void Assembler::emit(const Immediate& x) {
-  if (x.rmode_ != no_reloc) RecordRelocInfo(x.rmode_);
+  if (x.rmode_ != RelocInfo::NONE) RecordRelocInfo(x.rmode_);
   emit(x.x_);
 }
 
@@ -237,7 +242,7 @@
 }
 
 
-void Operand::set_dispr(int32_t disp, RelocMode rmode) {
+void Operand::set_dispr(int32_t disp, RelocInfo::Mode rmode) {
   ASSERT(len_ == 1 || len_ == 2);
   *reinterpret_cast<int32_t*>(&buf_[len_]) = disp;
   len_ += sizeof(int32_t);
@@ -250,7 +255,7 @@
 }
 
 
-Operand::Operand(int32_t disp, RelocMode rmode) {
+Operand::Operand(int32_t disp, RelocInfo::Mode rmode) {
   // [disp/r]
   set_modrm(0, ebp);
   set_dispr(disp, rmode);
diff --git a/src/assembler-ia32.cc b/src/assembler-ia32.cc
index 3f2b2c9..568d64d 100644
--- a/src/assembler-ia32.cc
+++ b/src/assembler-ia32.cc
@@ -151,7 +151,8 @@
 
 
 const int RelocInfo::kApplyMask =
-  RelocInfo::kCodeTargetMask | 1 << runtime_entry | 1 << js_return;
+  RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
+    1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE;
 
 
 void RelocInfo::patch_code(byte* instructions, int instruction_count) {
@@ -170,7 +171,7 @@
 
   // Patch the code.
   CodePatcher patcher(pc_, code_size);
-  patcher.masm()->call(target, no_reloc);
+  patcher.masm()->call(target, RelocInfo::NONE);
 
   // Add the requested number of int3 instructions after the call.
   for (int i = 0; i < guard_bytes; i++) {
@@ -182,13 +183,13 @@
 // -----------------------------------------------------------------------------
 // Implementation of Operand
 
-Operand::Operand(Register base, int32_t disp, RelocMode rmode) {
+Operand::Operand(Register base, int32_t disp, RelocInfo::Mode rmode) {
   // [base + disp/r]
-  if (disp == 0 && rmode == no_reloc && !base.is(ebp)) {
+  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
     // [base]
     set_modrm(0, base);
     if (base.is(esp)) set_sib(times_1, esp, base);
-  } else if (is_int8(disp) && rmode == no_reloc) {
+  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
     // [base + disp8]
     set_modrm(1, base);
     if (base.is(esp)) set_sib(times_1, esp, base);
@@ -206,14 +207,14 @@
                  Register index,
                  ScaleFactor scale,
                  int32_t disp,
-                 RelocMode rmode) {
+                 RelocInfo::Mode rmode) {
   ASSERT(!index.is(esp));  // illegal addressing mode
   // [base + index*scale + disp/r]
-  if (disp == 0 && rmode == no_reloc && !base.is(ebp)) {
+  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
     // [base + index*scale]
     set_modrm(0, esp);
     set_sib(scale, index, base);
-  } else if (is_int8(disp) && rmode == no_reloc) {
+  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
     // [base + index*scale + disp8]
     set_modrm(1, esp);
     set_sib(scale, index, base);
@@ -230,7 +231,7 @@
 Operand::Operand(Register index,
                  ScaleFactor scale,
                  int32_t disp,
-                 RelocMode rmode) {
+                 RelocInfo::Mode rmode) {
   ASSERT(!index.is(esp));  // illegal addressing mode
   // [index*scale + disp/r]
   set_modrm(0, esp);
@@ -316,8 +317,8 @@
 
   last_pc_ = NULL;
   last_bound_pos_ = 0;
-  last_position_ = kNoPosition;
-  last_position_is_statement_ = false;
+  last_position_ = RelocInfo::kNoPosition;
+  last_statement_position_ = RelocInfo::kNoPosition;
 }
 
 
@@ -468,7 +469,48 @@
         }
         return;
       }
+    } else if (instr == 0x6a && dst.is(eax)) {  // push of immediate 8 bit
+      byte imm8 = last_pc_[1];
+      if (imm8 == 0) {
+        // 6a00         push 0x0
+        // 58           pop eax
+        last_pc_[0] = 0x31;
+        last_pc_[1] = 0xc0;
+        // change to
+        // 31c0         xor eax,eax
+        last_pc_ = NULL;
+        return;
+      } else {
+        // 6aXX         push 0xXX
+        // 58           pop eax
+        last_pc_[0] = 0xb8;
+        EnsureSpace ensure_space(this);
+        if ((imm8 & 0x80) != 0) {
+          EMIT(0xff);
+          EMIT(0xff);
+          EMIT(0xff);
+          // change to
+          // b8XXffffff   mov eax,0xffffffXX
+        } else {
+          EMIT(0x00);
+          EMIT(0x00);
+          EMIT(0x00);
+          // change to
+          // b8XX000000   mov eax,0x000000XX
+        }
+        last_pc_ = NULL;
+        return;
+      }
+    } else if (instr == 0x68 && dst.is(eax)) {  // push of immediate 32 bit
+      // 68XXXXXXXX   push 0xXXXXXXXX
+      // 58           pop eax
+      last_pc_[0] = 0xb8;
+      last_pc_ = NULL;
+      // change to
+      // b8XXXXXXXX   mov eax,0xXXXXXXXX
+      return;
     }
+
     // Other potential patterns for peephole:
     // 0x712716   102  890424         mov [esp], eax
     // 0x712719   105  8b1424         mov edx, [esp]
@@ -1010,7 +1052,7 @@
   last_pc_ = pc_;
   // Only use test against byte for registers that have a byte
   // variant: eax, ebx, ecx, and edx.
-  if (imm.rmode_ == no_reloc && is_uint8(imm.x_) && reg.code() < 4) {
+  if (imm.rmode_ == RelocInfo::NONE && is_uint8(imm.x_) && reg.code() < 4) {
     uint8_t imm8 = imm.x_;
     if (reg.is(eax)) {
       EMIT(0xA8);
@@ -1181,6 +1223,7 @@
     if (disp.type() == Displacement::UNCONDITIONAL_JUMP) {
       ASSERT(byte_at(fixup_pos - 1) == 0xE9);  // jmp expected
     }
+    // relative address, relative to point after address
     int imm32 = pos - (fixup_pos + sizeof(int32_t));
     long_at_put(fixup_pos, imm32);
     disp.next(L);
@@ -1288,10 +1331,10 @@
 }
 
 
-void Assembler::call(byte* entry, RelocMode rmode) {
+void Assembler::call(byte* entry, RelocInfo::Mode rmode) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  ASSERT(!is_code_target(rmode));
+  ASSERT(!RelocInfo::IsCodeTarget(rmode));
   EMIT(0xE8);
   emit(entry - (pc_ + sizeof(int32_t)), rmode);
 }
@@ -1305,10 +1348,11 @@
 }
 
 
-void Assembler::call(Handle<Code> code,  RelocMode rmode) {
+void Assembler::call(Handle<Code> code,  RelocInfo::Mode rmode) {
+  WriteRecordedPositions();
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  ASSERT(is_code_target(rmode));
+  ASSERT(RelocInfo::IsCodeTarget(rmode));
   EMIT(0xE8);
   emit(reinterpret_cast<intptr_t>(code.location()), rmode);
 }
@@ -1349,10 +1393,10 @@
 }
 
 
-void Assembler::jmp(byte* entry, RelocMode rmode) {
+void Assembler::jmp(byte* entry, RelocInfo::Mode rmode) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  ASSERT(!is_code_target(rmode));
+  ASSERT(!RelocInfo::IsCodeTarget(rmode));
   EMIT(0xE9);
   emit(entry - (pc_ + sizeof(int32_t)), rmode);
 }
@@ -1366,10 +1410,10 @@
 }
 
 
-void Assembler::jmp(Handle<Code> code, RelocMode rmode) {
+void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  ASSERT(is_code_target(rmode));
+  ASSERT(RelocInfo::IsCodeTarget(rmode));
   EMIT(0xE9);
   emit(reinterpret_cast<intptr_t>(code.location()), rmode);
 }
@@ -1407,7 +1451,7 @@
 }
 
 
-void Assembler::j(Condition cc, byte* entry, RelocMode rmode, Hint hint) {
+void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode, Hint hint) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   ASSERT((0 <= cc) && (cc < 16));
@@ -1426,7 +1470,7 @@
   // 0000 1111 1000 tttn #32-bit disp
   EMIT(0x0F);
   EMIT(0x80 | cc);
-  emit(reinterpret_cast<intptr_t>(code.location()), code_target);
+  emit(reinterpret_cast<intptr_t>(code.location()), RelocInfo::CODE_TARGET);
 }
 
 
@@ -1844,38 +1888,46 @@
 
 
 void Assembler::RecordJSReturn() {
+  WriteRecordedPositions();
   EnsureSpace ensure_space(this);
-  RecordRelocInfo(js_return);
+  RecordRelocInfo(RelocInfo::JS_RETURN);
 }
 
 
 void Assembler::RecordComment(const char* msg) {
   if (FLAG_debug_code) {
     EnsureSpace ensure_space(this);
-    RecordRelocInfo(comment, reinterpret_cast<intptr_t>(msg));
+    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
   }
 }
 
 
 void Assembler::RecordPosition(int pos) {
-  if (pos == kNoPosition) return;
-  ASSERT(position >= 0);
-  if (pos == last_position_) return;
-  EnsureSpace ensure_space(this);
-  RecordRelocInfo(position, pos);
+  if (pos == RelocInfo::kNoPosition) return;
+  ASSERT(pos >= 0);
   last_position_ = pos;
-  last_position_is_statement_ = false;
 }
 
 
 void Assembler::RecordStatementPosition(int pos) {
-  if (pos == kNoPosition) return;
-  ASSERT(position >= 0);
-  if (pos == last_position_) return;
-  EnsureSpace ensure_space(this);
-  RecordRelocInfo(statement_position, pos);
-  last_position_ = pos;
-  last_position_is_statement_ = true;
+  if (pos == RelocInfo::kNoPosition) return;
+  ASSERT(pos >= 0);
+  last_statement_position_ = pos;
+}
+
+
+void Assembler::WriteRecordedPositions() {
+  if (last_statement_position_ != RelocInfo::kNoPosition) {
+    EnsureSpace ensure_space(this);
+    RecordRelocInfo(RelocInfo::STATEMENT_POSITION, last_statement_position_);
+  }
+  if ((last_position_ != RelocInfo::kNoPosition) &&
+      (last_position_ != last_statement_position_)) {
+    EnsureSpace ensure_space(this);
+    RecordRelocInfo(RelocInfo::POSITION, last_position_);
+  }
+  last_statement_position_ = RelocInfo::kNoPosition;
+  last_position_ = RelocInfo::kNoPosition;
 }
 
 
@@ -1932,10 +1984,15 @@
 
   // relocate runtime entries
   for (RelocIterator it(desc); !it.done(); it.next()) {
-    RelocMode rmode = it.rinfo()->rmode();
-    if (rmode == runtime_entry) {
+    RelocInfo::Mode rmode = it.rinfo()->rmode();
+    if (rmode == RelocInfo::RUNTIME_ENTRY) {
       int32_t* p = reinterpret_cast<int32_t*>(it.rinfo()->pc());
       *p -= pc_delta;  // relocate entry
+    } else if (rmode == RelocInfo::INTERNAL_REFERENCE) {
+      int32_t* p = reinterpret_cast<int32_t*>(it.rinfo()->pc());
+      if (*p != 0) {  // 0 means uninitialized.
+        *p += pc_delta;
+      }
     }
   }
 
@@ -1975,7 +2032,7 @@
   adr.set_reg(reg);
   memmove(pc_, adr.buf_, adr.len_);
   pc_ += adr.len_;
-  if (adr.len_ >= sizeof(int32_t) && adr.rmode_ != no_reloc) {
+  if (adr.len_ >= sizeof(int32_t) && adr.rmode_ != RelocInfo::NONE) {
     pc_ -= sizeof(int32_t);  // pc_ must be *at* disp32
     RecordRelocInfo(adr.rmode_);
     pc_ += sizeof(int32_t);
@@ -1987,7 +2044,7 @@
   adr.set_reg(reg);
   memmove(pc_, adr.buf_, adr.len_);
   pc_ += adr.len_;
-  if (adr.len_ >= sizeof(int32_t) && adr.rmode_ != no_reloc) {
+  if (adr.len_ >= sizeof(int32_t) && adr.rmode_ != RelocInfo::NONE) {
     pc_ -= sizeof(int32_t);  // pc_ must be *at* disp32
     RecordRelocInfo(adr.rmode_);
     pc_ += sizeof(int32_t);
@@ -2003,10 +2060,16 @@
 }
 
 
-void Assembler::RecordRelocInfo(RelocMode rmode, intptr_t data) {
-  ASSERT(rmode != no_reloc);
+void Assembler::dd(uint32_t data, RelocInfo::Mode reloc_info) {
+  EnsureSpace ensure_space(this);
+  emit(data, reloc_info);
+}
+
+
+void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
+  ASSERT(rmode != RelocInfo::NONE);
   // Don't record external references unless the heap will be serialized.
-  if (rmode == external_reference &&
+  if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
       !Serializer::enabled() &&
       !FLAG_debug_code) {
     return;
@@ -2015,5 +2078,14 @@
   reloc_info_writer.Write(&rinfo);
 }
 
+void Assembler::WriteInternalReference(int position, const Label& bound_label) {
+  ASSERT(bound_label.is_bound());
+  ASSERT(0 <= position);
+  ASSERT(position + static_cast<int>(sizeof(uint32_t)) <= pc_offset());
+  ASSERT(long_at(position) == 0);  // only initialize once!
+
+  uint32_t label_loc = reinterpret_cast<uint32_t>(addr_at(bound_label.pos()));
+  long_at_put(position, label_loc);
+}
 
 } }  // namespace v8::internal
diff --git a/src/assembler-ia32.h b/src/assembler-ia32.h
index 700253e..7c67867 100644
--- a/src/assembler-ia32.h
+++ b/src/assembler-ia32.h
@@ -183,12 +183,14 @@
   inline explicit Immediate(Handle<Object> handle);
   inline explicit Immediate(Smi* value);
 
-  bool is_zero() const  { return x_ == 0 && rmode_ == no_reloc; }
-  bool is_int8() const  { return -128 <= x_ && x_ < 128 && rmode_ == no_reloc; }
+  bool is_zero() const { return x_ == 0 && rmode_ == RelocInfo::NONE; }
+  bool is_int8() const {
+    return -128 <= x_ && x_ < 128 && rmode_ == RelocInfo::NONE;
+  }
 
  private:
   int x_;
-  RelocMode rmode_;
+  RelocInfo::Mode rmode_;
 
   friend class Assembler;
 };
@@ -211,35 +213,36 @@
   INLINE(explicit Operand(Register reg));
 
   // [disp/r]
-  INLINE(explicit Operand(int32_t disp, RelocMode rmode));
+  INLINE(explicit Operand(int32_t disp, RelocInfo::Mode rmode));
   // disp only must always be relocated
 
   // [base + disp/r]
-  explicit Operand(Register base, int32_t disp, RelocMode rmode = no_reloc);
+  explicit Operand(Register base, int32_t disp,
+                   RelocInfo::Mode rmode = RelocInfo::NONE);
 
   // [base + index*scale + disp/r]
   explicit Operand(Register base,
                    Register index,
                    ScaleFactor scale,
                    int32_t disp,
-                   RelocMode rmode = no_reloc);
+                   RelocInfo::Mode rmode = RelocInfo::NONE);
 
   // [index*scale + disp/r]
   explicit Operand(Register index,
                    ScaleFactor scale,
                    int32_t disp,
-                   RelocMode rmode = no_reloc);
+                   RelocInfo::Mode rmode = RelocInfo::NONE);
 
   static Operand StaticVariable(const ExternalReference& ext) {
     return Operand(reinterpret_cast<int32_t>(ext.address()),
-                   external_reference);
+                   RelocInfo::EXTERNAL_REFERENCE);
   }
 
   static Operand StaticArray(Register index,
                              ScaleFactor scale,
                              const ExternalReference& arr) {
     return Operand(index, scale, reinterpret_cast<int32_t>(arr.address()),
-                   external_reference);
+                   RelocInfo::EXTERNAL_REFERENCE);
   }
 
   // Returns true if this Operand is a wrapper for the specified register.
@@ -251,13 +254,13 @@
   // The number of bytes in buf_.
   unsigned int len_;
   // Only valid if len_ > 4.
-  RelocMode rmode_;
+  RelocInfo::Mode rmode_;
 
   inline void set_modrm(int mod,  // reg == 0
                         Register rm);
   inline void set_sib(ScaleFactor scale, Register index, Register base);
   inline void set_disp8(int8_t disp);
-  inline void set_dispr(int32_t disp, RelocMode rmode);
+  inline void set_dispr(int32_t disp, RelocInfo::Mode rmode);
   inline void set_reg(Register reg) const;
 
   friend class Assembler;
@@ -280,7 +283,7 @@
 //
 // Displacement _data field layout
 //
-// |31.....1|.......0|
+// |31.....1| ......0|
 // [  next  |  type  |
 
 class Displacement BASE_EMBEDDED {
@@ -317,6 +320,7 @@
 };
 
 
+
 // CpuFeatures keeps track of which features are supported by the target CPU.
 // Supported features must be enabled by a Scope before use.
 // Example:
@@ -573,19 +577,19 @@
 
   // Calls
   void call(Label* L);
-  void call(byte* entry, RelocMode rmode);
+  void call(byte* entry, RelocInfo::Mode rmode);
   void call(const Operand& adr);
-  void call(Handle<Code> code, RelocMode rmode);
+  void call(Handle<Code> code, RelocInfo::Mode rmode);
 
   // Jumps
   void jmp(Label* L);  // unconditional jump to L
-  void jmp(byte* entry, RelocMode rmode);
+  void jmp(byte* entry, RelocInfo::Mode rmode);
   void jmp(const Operand& adr);
-  void jmp(Handle<Code> code, RelocMode rmode);
+  void jmp(Handle<Code> code, RelocInfo::Mode rmode);
 
   // Conditional jumps
   void j(Condition cc, Label* L, Hint hint = no_hint);
-  void j(Condition cc, byte* entry, RelocMode rmode, Hint hint = no_hint);
+  void j(Condition cc, byte* entry, RelocInfo::Mode rmode, Hint hint = no_hint);
   void j(Condition cc, Handle<Code> code, Hint hint = no_hint);
 
   // Floating-point operations
@@ -673,12 +677,20 @@
 
   void RecordPosition(int pos);
   void RecordStatementPosition(int pos);
+  void WriteRecordedPositions();
+
+  // Writes a single word of data in the code stream.
+  // Used for inline tables, e.g., jump-tables.
+  void dd(uint32_t data, RelocInfo::Mode reloc_info);
+
+  // Writes the absolute address of a bound label at the given position in
+  // the generated code. That position should have the relocation mode
+  // internal_reference!
+  void WriteInternalReference(int position, const Label& bound_label);
 
   int pc_offset() const  { return pc_ - buffer_; }
+  int last_statement_position() const  { return last_statement_position_; }
   int last_position() const  { return last_position_; }
-  bool last_position_is_statement() const  {
-    return last_position_is_statement_;
-  }
 
   // Check if there is less than kGap bytes available in the buffer.
   // If this is the case, we need to grow the buffer before emitting
@@ -725,7 +737,7 @@
 
   // source position information
   int last_position_;
-  bool last_position_is_statement_;
+  int last_statement_position_;
 
   byte* addr_at(int pos)  { return buffer_ + pos; }
   byte byte_at(int pos)  { return buffer_[pos]; }
@@ -740,7 +752,7 @@
   void GrowBuffer();
   inline void emit(uint32_t x);
   inline void emit(Handle<Object> handle);
-  inline void emit(uint32_t x, RelocMode rmode);
+  inline void emit(uint32_t x, RelocInfo::Mode rmode);
   inline void emit(const Immediate& x);
 
   // instruction generation
@@ -768,7 +780,7 @@
   inline void emit_disp(Label* L, Displacement::Type type);
 
   // record reloc info for current pc_
-  void RecordRelocInfo(RelocMode rmode, intptr_t data = 0);
+  void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
 
   friend class CodePatcher;
   friend class EnsureSpace;
diff --git a/src/assembler.cc b/src/assembler.cc
index 7b4ffc7..9e63ac5 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -78,7 +78,7 @@
 // statement_position: [6 bits pc delta] 10,
 //                     [7 bits signed data delta] 1
 //
-// any nondata mode:   00 [4 bits rmode] 11,
+// any nondata mode:   00 [4 bits rmode] 11,  // rmode: 0..13 only
 //                     00 [6 bits pc delta]
 //
 // pc-jump:            00 1111 11,
@@ -192,21 +192,21 @@
 #endif
   Counters::reloc_info_count.Increment();
   ASSERT(rinfo->pc() - last_pc_ >= 0);
-  ASSERT(reloc_mode_count < kMaxRelocModes);
+  ASSERT(RelocInfo::NUMBER_OF_MODES < kMaxRelocModes);
   // Use unsigned delta-encoding for pc.
   uint32_t pc_delta = rinfo->pc() - last_pc_;
-  RelocMode rmode = rinfo->rmode();
+  RelocInfo::Mode rmode = rinfo->rmode();
 
   // The two most common modes are given small tags, and usually fit in a byte.
-  if (rmode == embedded_object) {
+  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
     WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
-  } else if (rmode == code_target) {
+  } else if (rmode == RelocInfo::CODE_TARGET) {
     WriteTaggedPC(pc_delta, kCodeTargetTag);
-  } else if (rmode == position || rmode == statement_position) {
+  } else if (RelocInfo::IsPosition(rmode)) {
     // Use signed delta-encoding for data.
     int32_t data_delta = rinfo->data() - last_data_;
-    int pos_type_tag = rmode == position ? kNonstatementPositionTag
-                                         : kStatementPositionTag;
+    int pos_type_tag = rmode == RelocInfo::POSITION ? kNonstatementPositionTag
+                                                    : kStatementPositionTag;
     // Check if data is small enough to fit in a tagged byte.
     if (is_intn(data_delta, kSmallDataBits)) {
       WriteTaggedPC(pc_delta, kPositionTag);
@@ -218,7 +218,7 @@
       WriteExtraTaggedData(data_delta, pos_type_tag);
       last_data_ = rinfo->data();
     }
-  } else if (rmode == comment) {
+  } else if (RelocInfo::IsComment(rmode)) {
     // Comments are normally not generated, so we use the costly encoding.
     WriteExtraTaggedPC(pc_delta, kPCJumpTag);
     WriteExtraTaggedData(rinfo->data() - last_data_, kCommentTag);
@@ -297,14 +297,14 @@
 }
 
 
-inline RelocMode RelocIterator::DebugInfoModeFromTag(int tag) {
+inline RelocInfo::Mode RelocIterator::DebugInfoModeFromTag(int tag) {
   if (tag == kStatementPositionTag) {
-    return statement_position;
+    return RelocInfo::STATEMENT_POSITION;
   } else if (tag == kNonstatementPositionTag) {
-    return position;
+    return RelocInfo::POSITION;
   } else {
     ASSERT(tag == kCommentTag);
-    return comment;
+    return RelocInfo::COMMENT;
   }
 }
 
@@ -320,14 +320,14 @@
     int tag = AdvanceGetTag();
     if (tag == kEmbeddedObjectTag) {
       ReadTaggedPC();
-      if (SetMode(embedded_object)) return;
+      if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
     } else if (tag == kCodeTargetTag) {
       ReadTaggedPC();
       if (*(reinterpret_cast<int**>(rinfo_.pc())) ==
           reinterpret_cast<int*>(0x61)) {
         tag = 0;
       }
-      if (SetMode(code_target)) return;
+      if (SetMode(RelocInfo::CODE_TARGET)) return;
     } else if (tag == kPositionTag) {
       ReadTaggedPC();
       Advance();
@@ -362,7 +362,7 @@
         }
       } else {
         AdvanceReadPC();
-        if (SetMode(static_cast<RelocMode>(extra_tag))) return;
+        if (SetMode(static_cast<RelocInfo::Mode>(extra_tag))) return;
       }
     }
   }
@@ -401,37 +401,37 @@
 
 
 #ifdef ENABLE_DISASSEMBLER
-const char* RelocInfo::RelocModeName(RelocMode rmode) {
+const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
   switch (rmode) {
-    case no_reloc:
+    case RelocInfo::NONE:
       return "no reloc";
-    case embedded_object:
+    case RelocInfo::EMBEDDED_OBJECT:
       return "embedded object";
-    case embedded_string:
+    case RelocInfo::EMBEDDED_STRING:
       return "embedded string";
-    case js_construct_call:
+    case RelocInfo::CONSTRUCT_CALL:
       return "code target (js construct call)";
-    case exit_js_frame:
-      return "code target (exit js frame)";
-    case code_target_context:
+    case RelocInfo::CODE_TARGET_CONTEXT:
       return "code target (context)";
-    case code_target:
+    case RelocInfo::CODE_TARGET:
       return "code target";
-    case runtime_entry:
+    case RelocInfo::RUNTIME_ENTRY:
       return "runtime entry";
-    case js_return:
+    case RelocInfo::JS_RETURN:
       return "js return";
-    case comment:
+    case RelocInfo::COMMENT:
       return "comment";
-    case position:
+    case RelocInfo::POSITION:
       return "position";
-    case statement_position:
+    case RelocInfo::STATEMENT_POSITION:
       return "statement position";
-    case external_reference:
+    case RelocInfo::EXTERNAL_REFERENCE:
       return "external reference";
-    case reloc_mode_count:
+    case RelocInfo::INTERNAL_REFERENCE:
+      return "internal reference";
+    case RelocInfo::NUMBER_OF_MODES:
       UNREACHABLE();
-      return "reloc_mode_count";
+      return "number_of_modes";
   }
   return "unknown relocation type";
 }
@@ -439,21 +439,21 @@
 
 void RelocInfo::Print() {
   PrintF("%p  %s", pc_, RelocModeName(rmode_));
-  if (rmode_ == comment) {
+  if (IsComment(rmode_)) {
     PrintF("  (%s)", data_);
-  } else if (rmode_ == embedded_object) {
+  } else if (rmode_ == EMBEDDED_OBJECT) {
     PrintF("  (");
     target_object()->ShortPrint();
     PrintF(")");
-  } else if (rmode_ == external_reference) {
+  } else if (rmode_ == EXTERNAL_REFERENCE) {
     ExternalReferenceEncoder ref_encoder;
     PrintF(" (%s)  (%p)",
            ref_encoder.NameOfAddress(*target_reference_address()),
            *target_reference_address());
-  } else if (is_code_target(rmode_)) {
+  } else if (IsCodeTarget(rmode_)) {
     Code* code = Debug::GetCodeTarget(target_address());
     PrintF(" (%s)  (%p)", Code::Kind2String(code->kind()), target_address());
-  } else if (is_position(rmode_)) {
+  } else if (IsPosition(rmode_)) {
     PrintF("  (%d)", data());
   }
 
@@ -465,13 +465,12 @@
 #ifdef DEBUG
 void RelocInfo::Verify() {
   switch (rmode_) {
-    case embedded_object:
+    case EMBEDDED_OBJECT:
       Object::VerifyPointer(target_object());
       break;
-    case js_construct_call:
-    case exit_js_frame:
-    case code_target_context:
-    case code_target: {
+    case CONSTRUCT_CALL:
+    case CODE_TARGET_CONTEXT:
+    case CODE_TARGET: {
       // convert inline target address to code object
       Address addr = target_address();
       ASSERT(addr != NULL);
@@ -482,16 +481,17 @@
       ASSERT(code->address() == HeapObject::cast(found)->address());
       break;
     }
-    case embedded_string:
-    case runtime_entry:
-    case js_return:
-    case comment:
-    case position:
-    case statement_position:
-    case external_reference:
-    case no_reloc:
+    case RelocInfo::EMBEDDED_STRING:
+    case RUNTIME_ENTRY:
+    case JS_RETURN:
+    case COMMENT:
+    case POSITION:
+    case STATEMENT_POSITION:
+    case EXTERNAL_REFERENCE:
+    case INTERNAL_REFERENCE:
+    case NONE:
       break;
-    case reloc_mode_count:
+    case NUMBER_OF_MODES:
       UNREACHABLE();
       break;
   }
diff --git a/src/assembler.h b/src/assembler.h
index d50de61..e5b9f04 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -130,92 +130,6 @@
 // -----------------------------------------------------------------------------
 // Relocation information
 
-// The constant kNoPosition is used with the collecting of source positions
-// in the relocation information. Two types of source positions are collected
-// "position" (RelocMode position) and "statement position" (RelocMode
-// statement_position). The "position" is collected at places in the source
-// code which are of interest when making stack traces to pin-point the source
-// location of a stack frame as close as possible. The "statement position" is
-// collected at the beginning at each statement, and is used to indicate
-// possible break locations. kNoPosition is used to indicate an
-// invalid/uninitialized position value.
-static const int kNoPosition = -1;
-
-
-enum RelocMode {
-  // Please note the order is important (see is_code_target, is_gc_reloc_mode).
-  js_construct_call,   // code target that is an exit JavaScript frame stub.
-  exit_js_frame,       // code target that is an exit JavaScript frame stub.
-  code_target_context,  // code target used for contextual loads.
-  code_target,         // code target which is not any of the above.
-  embedded_object,
-  embedded_string,
-
-  // Everything after runtime_entry (inclusive) is not GC'ed.
-  runtime_entry,
-  js_return,  // Marks start of the ExitJSFrame code.
-  comment,
-  position,  // See comment for kNoPosition above.
-  statement_position,  // See comment for kNoPosition above.
-  external_reference,  // The address of an external C++ function.
-  // add more as needed
-  no_reloc,  // never recorded
-
-  // Pseudo-types
-  reloc_mode_count,
-  last_code_enum = code_target,
-  last_gced_enum = embedded_string
-};
-
-
-inline int RelocMask(RelocMode mode) {
-  return 1 << mode;
-}
-
-
-inline bool is_js_construct_call(RelocMode mode) {
-  return mode == js_construct_call;
-}
-
-
-inline bool is_exit_js_frame(RelocMode mode) {
-  return mode == exit_js_frame;
-}
-
-
-inline bool is_code_target(RelocMode mode) {
-  return mode <= last_code_enum;
-}
-
-
-// Is the relocation mode affected by GC?
-inline bool is_gc_reloc_mode(RelocMode mode) {
-  return mode <= last_gced_enum;
-}
-
-
-inline bool is_js_return(RelocMode mode) {
-  return mode == js_return;
-}
-
-
-inline bool is_comment(RelocMode mode) {
-  return mode == comment;
-}
-
-
-inline bool is_position(RelocMode mode) {
-  return mode == position || mode == statement_position;
-}
-
-
-inline bool is_statement_position(RelocMode mode) {
-  return mode == statement_position;
-}
-
-inline bool is_external_reference(RelocMode mode) {
-  return mode == external_reference;
-}
 
 // Relocation information consists of the address (pc) of the datum
 // to which the relocation information applies, the relocation mode
@@ -226,22 +140,89 @@
 
 class RelocInfo BASE_EMBEDDED {
  public:
+  // The constant kNoPosition is used with the collecting of source positions
+  // in the relocation information. Two types of source positions are collected
+  // "position" (RelocMode position) and "statement position" (RelocMode
+  // statement_position). The "position" is collected at places in the source
+  // code which are of interest when making stack traces to pin-point the source
+  // location of a stack frame as close as possible. The "statement position" is
+  // collected at the beginning of each statement, and is used to indicate
+  // possible break locations. kNoPosition is used to indicate an
+  // invalid/uninitialized position value.
+  static const int kNoPosition = -1;
+
+  enum Mode {
+    // Please note the order is important (see IsCodeTarget, IsGCRelocMode).
+    CONSTRUCT_CALL,  // code target that is a call to a JavaScript constructor.
+    CODE_TARGET_CONTEXT,  // code target used for contextual loads.
+    CODE_TARGET,         // code target which is not any of the above.
+    EMBEDDED_OBJECT,
+    EMBEDDED_STRING,
+
+    // Everything after runtime_entry (inclusive) is not GC'ed.
+    RUNTIME_ENTRY,
+    JS_RETURN,  // Marks start of the ExitJSFrame code.
+    COMMENT,
+    POSITION,  // See comment for kNoPosition above.
+    STATEMENT_POSITION,  // See comment for kNoPosition above.
+    EXTERNAL_REFERENCE,  // The address of an external C++ function.
+    INTERNAL_REFERENCE,  // An address inside the same function.
+
+    // add more as needed
+    // Pseudo-types
+    NUMBER_OF_MODES,  // must be no greater than 14 - see RelocInfoWriter
+    NONE,  // never recorded
+    LAST_CODE_ENUM = CODE_TARGET,
+    LAST_GCED_ENUM = EMBEDDED_STRING
+  };
+
+
   RelocInfo() {}
-  RelocInfo(byte* pc, RelocMode rmode, intptr_t data)
+  RelocInfo(byte* pc, Mode rmode, intptr_t data)
       : pc_(pc), rmode_(rmode), data_(data) {
   }
 
+  static inline bool IsConstructCall(Mode mode) {
+    return mode == CONSTRUCT_CALL;
+  }
+  static inline bool IsCodeTarget(Mode mode) {
+    return mode <= LAST_CODE_ENUM;
+  }
+  // Is the relocation mode affected by GC?
+  static inline bool IsGCRelocMode(Mode mode) {
+    return mode <= LAST_GCED_ENUM;
+  }
+  static inline bool IsJSReturn(Mode mode) {
+    return mode == JS_RETURN;
+  }
+  static inline bool IsComment(Mode mode) {
+    return mode == COMMENT;
+  }
+  static inline bool IsPosition(Mode mode) {
+    return mode == POSITION || mode == STATEMENT_POSITION;
+  }
+  static inline bool IsStatementPosition(Mode mode) {
+    return mode == STATEMENT_POSITION;
+  }
+  static inline bool IsExternalReference(Mode mode) {
+    return mode == EXTERNAL_REFERENCE;
+  }
+  static inline bool IsInternalReference(Mode mode) {
+    return mode == INTERNAL_REFERENCE;
+  }
+  static inline int ModeMask(Mode mode) { return 1 << mode; }
+
   // Accessors
   byte* pc() const  { return pc_; }
   void set_pc(byte* pc) { pc_ = pc; }
-  RelocMode rmode() const {  return rmode_; }
+  Mode rmode() const {  return rmode_; }
   intptr_t data() const  { return data_; }
 
   // Apply a relocation by delta bytes
   INLINE(void apply(int delta));
 
   // Read/modify the code target in the branch/call instruction this relocation
-  // applies to; can only be called if this->is_code_target(rmode_)
+  // applies to; can only be called if IsCodeTarget(rmode_)
   INLINE(Address target_address());
   INLINE(void set_target_address(Address target));
   INLINE(Object* target_object());
@@ -270,7 +251,7 @@
 
 #ifdef ENABLE_DISASSEMBLER
   // Printing
-  static const char* RelocModeName(RelocMode rmode);
+  static const char* RelocModeName(Mode rmode);
   void Print();
 #endif  // ENABLE_DISASSEMBLER
 #ifdef DEBUG
@@ -278,9 +259,9 @@
   void Verify();
 #endif
 
-  static const int kCodeTargetMask = (1 << (last_code_enum + 1)) - 1;
-  static const int kPositionMask = 1 << position | 1 << statement_position;
-  static const int kDebugMask = kPositionMask | 1 << comment;
+  static const int kCodeTargetMask = (1 << (LAST_CODE_ENUM + 1)) - 1;
+  static const int kPositionMask = 1 << POSITION | 1 << STATEMENT_POSITION;
+  static const int kDebugMask = kPositionMask | 1 << COMMENT;
   static const int kApplyMask;  // Modes affected by apply. Depends on arch.
 
  private:
@@ -289,7 +270,7 @@
   // referencing the constant pool entry (except when rmode_ ==
   // comment).
   byte* pc_;
-  RelocMode rmode_;
+  Mode rmode_;
   intptr_t data_;
   friend class RelocIterator;
 };
@@ -375,11 +356,11 @@
   int GetPositionTypeTag();
   void ReadTaggedData();
 
-  static RelocMode DebugInfoModeFromTag(int tag);
+  static RelocInfo::Mode DebugInfoModeFromTag(int tag);
 
   // If the given mode is wanted, set it in rinfo_ and return true.
   // Else return false. Used for efficiently skipping unwanted modes.
-  bool SetMode(RelocMode mode) {
+  bool SetMode(RelocInfo::Mode mode) {
     return (mode_mask_ & 1 << mode) ? (rinfo_.rmode_ = mode, true) : false;
   }
 
diff --git a/src/ast.cc b/src/ast.cc
index 6e60078..2f21d92 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -146,7 +146,6 @@
 }
 
 
-
 void LabelCollector::AddLabel(Label* label) {
   // Add the label to the collector, but discard duplicates.
   int length = labels_->length();
diff --git a/src/ast.h b/src/ast.h
index 8bece05..7cde61a 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -101,7 +101,7 @@
 
 class Node: public ZoneObject {
  public:
-  Node(): statement_pos_(kNoPosition) { }
+  Node(): statement_pos_(RelocInfo::kNoPosition) { }
   virtual ~Node() { }
   virtual void Accept(Visitor* v) = 0;
 
@@ -1167,7 +1167,7 @@
         start_position_(start_position),
         end_position_(end_position),
         is_expression_(is_expression),
-        function_token_position_(kNoPosition) {
+        function_token_position_(RelocInfo::kNoPosition) {
   }
 
   virtual void Accept(Visitor* v);
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 71c1076..cfc29df 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -655,7 +655,7 @@
   {  // -- R e g E x p
     // Builtin functions for RegExp.prototype.
     Handle<JSFunction> regexp_fun =
-        InstallFunction(global, "RegExp", JS_VALUE_TYPE, JSValue::kSize,
+        InstallFunction(global, "RegExp", JS_REGEXP_TYPE, JSRegExp::kSize,
                         Top::initial_object_prototype(), Builtins::Illegal,
                         true);
 
diff --git a/src/builtins-arm.cc b/src/builtins-arm.cc
index da177ac..c030cde 100644
--- a/src/builtins-arm.cc
+++ b/src/builtins-arm.cc
@@ -117,7 +117,6 @@
   // r0: number of arguments
   // r1: constructor function
   Label return_site;
-  __ RecordPosition(position);
   ParameterCount actual(r0);
   __ InvokeFunction(r1, actual, CALL_FUNCTION);
   __ bind(&return_site);
@@ -169,7 +168,7 @@
   // sp[1]: constructor function
   // sp[2]: number of arguments (smi-tagged)
   __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
   __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
   __ add(sp, sp, Operand(kPointerSize));
   __ mov(pc, Operand(lr));
@@ -233,7 +232,7 @@
   __ mov(r0, Operand(r3));
   if (is_construct) {
     __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-            code_target);
+            RelocInfo::CODE_TARGET);
   } else {
     ParameterCount actual(r0);
     __ InvokeFunction(r1, actual, CALL_FUNCTION);
@@ -241,7 +240,7 @@
 
   // Exit the JS frame and remove the parameters (except function), and return.
   // Respect ABI stack constraint.
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
   __ mov(pc, lr);
 
   // r0: result
@@ -338,7 +337,7 @@
     __ pop(r0);
     __ mov(r0, Operand(r0, ASR, kSmiTagSize));
 
-    __ ExitInternalFrame();
+    __ LeaveInternalFrame();
     __ b(&patch_receiver);
 
     // Use the global object from the called function as the receiver.
@@ -386,7 +385,8 @@
     __ b(ne, &invoke);
     __ mov(r2, Operand(0));  // expected arguments is 0 for CALL_NON_FUNCTION
     __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
-    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), code_target);
+    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+                         RelocInfo::CODE_TARGET);
 
     __ bind(&invoke);
     __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
@@ -397,7 +397,8 @@
            MemOperand(r3, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
     __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
     __ cmp(r2, r0);  // Check formal and actual parameter counts.
-    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), code_target, ne);
+    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+                         RelocInfo::CODE_TARGET, ne);
 
     // 7. Jump to the code in r3 without checking arguments.
     ParameterCount expected(0);
@@ -424,12 +425,6 @@
   // Eagerly check for stack-overflow before starting to push the arguments.
   // r0: number of arguments
   Label okay;
-  { Label L;
-    __ mov(r1, Operand(391864 << kSmiTagSize));
-    __ cmp(r0, r1);
-    __ b(cc, &L);
-    __ bind(&L);
-  }
   ExternalReference stack_guard_limit_address =
       ExternalReference::address_of_stack_guard_limit();
   __ mov(r2, Operand(stack_guard_limit_address));
@@ -530,7 +525,7 @@
   __ InvokeFunction(r1, actual, CALL_FUNCTION);
 
   // Tear down the internal frame and remove function, receiver and args.
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
   __ add(sp, sp, Operand(3 * kPointerSize));
   __ mov(pc, lr);
 }
diff --git a/src/builtins-ia32.cc b/src/builtins-ia32.cc
index dbf8c5c..de642af 100644
--- a/src/builtins-ia32.cc
+++ b/src/builtins-ia32.cc
@@ -266,7 +266,6 @@
 
   // Call the function.
   Label return_site;
-  __ RecordPosition(position);
   ParameterCount actual(eax);
   __ InvokeFunction(edi, actual, CALL_FUNCTION);
   __ bind(&return_site);
@@ -298,7 +297,7 @@
   // Restore the arguments count and exit the internal frame.
   __ bind(&exit);
   __ mov(ebx, Operand(esp, kPointerSize));  // get arguments count
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // Remove caller arguments from the stack and return.
   ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
@@ -355,7 +354,7 @@
   // Invoke the code.
   if (is_construct) {
     __ call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-            code_target);
+            RelocInfo::CODE_TARGET);
   } else {
     ParameterCount actual(eax);
     __ InvokeFunction(edi, actual, CALL_FUNCTION);
@@ -364,7 +363,7 @@
   // Exit the JS frame. Notice that this also removes the empty
   // context and the function left on the stack by the code
   // invocation.
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
   __ ret(1 * kPointerSize);  // remove receiver
 }
 
@@ -451,7 +450,7 @@
     __ pop(eax);
     __ shr(eax, kSmiTagSize);
 
-    __ ExitInternalFrame();
+    __ LeaveInternalFrame();
     __ jmp(&patch_receiver);
 
     // Use the global object from the called function as the receiver.
@@ -490,7 +489,8 @@
     __ j(not_zero, &invoke, taken);
     __ xor_(ebx, Operand(ebx));
     __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
-    __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), code_target);
+    __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+           RelocInfo::CODE_TARGET);
 
     __ bind(&invoke);
     __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
@@ -592,7 +592,7 @@
 
   // Use inline caching to speed up access to arguments.
   Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
-  __ call(ic, code_target);
+  __ call(ic, RelocInfo::CODE_TARGET);
 
   // Remove IC arguments from the stack and push the nth argument.
   __ add(Operand(esp), Immediate(2 * kPointerSize));
@@ -613,7 +613,7 @@
   __ mov(edi, Operand(ebp, 4 * kPointerSize));
   __ InvokeFunction(edi, actual, CALL_FUNCTION);
 
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
   __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
 }
 
@@ -771,7 +771,7 @@
   __ PopRegistersToMemory(pointer_regs);
 
   // Get rid of the internal frame.
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // If this call did not replace a call but patched other code then there will
   // be an unwanted return address left on the stack. Here we get rid of that.
diff --git a/src/checks.h b/src/checks.h
index 3908ab4..020f129 100644
--- a/src/checks.h
+++ b/src/checks.h
@@ -37,14 +37,20 @@
 
 // The FATAL, UNREACHABLE and UNIMPLEMENTED macros are useful during
 // development, but they should not be relied on in the final product.
+#ifdef DEBUG
 #define FATAL(msg)                              \
   V8_Fatal(__FILE__, __LINE__, "%s", (msg))
-
-#define UNREACHABLE()                           \
-  V8_Fatal(__FILE__, __LINE__, "unreachable code")
-
 #define UNIMPLEMENTED()                         \
   V8_Fatal(__FILE__, __LINE__, "unimplemented code")
+#define UNREACHABLE()                           \
+  V8_Fatal(__FILE__, __LINE__, "unreachable code")
+#else
+#define FATAL(msg)                              \
+  V8_Fatal("", 0, "%s", (msg))
+#define UNIMPLEMENTED()                         \
+  V8_Fatal("", 0, "unimplemented code")
+#define UNREACHABLE() ((void) 0)
+#endif
 
 
 // Used by the CHECK macro -- should not be called directly.
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index e98c158..d337124 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -65,7 +65,7 @@
     // Add unresolved entries in the code to the fixup list.
     Bootstrapper::AddFixup(*code, &masm);
 
-    LOG(CodeCreateEvent(GetName(), *code, ""));
+    LOG(CodeCreateEvent("Stub", *code, GetName()));
     Counters::total_stubs_code_size.Increment(code->instruction_size());
 
 #ifdef DEBUG
diff --git a/src/codegen-arm.cc b/src/codegen-arm.cc
index 9a17746..dfe5745 100644
--- a/src/codegen-arm.cc
+++ b/src/codegen-arm.cc
@@ -826,7 +826,7 @@
     Literal key(variable->name());
     // TODO(1241834): Fetch the position from the variable instead of using
     // no position.
-    Property property(&global, &key, kNoPosition);
+    Property property(&global, &key, RelocInfo::kNoPosition);
     Load(&property);
   } else {
     Load(x, CodeGenState::LOAD_TYPEOF_EXPR);
@@ -1629,22 +1629,22 @@
 void CEntryStub::GenerateCore(MacroAssembler* masm,
                               Label* throw_normal_exception,
                               Label* throw_out_of_memory_exception,
-                              bool do_gc,
-                              bool do_restore) {
+                              StackFrame::Type frame_type,
+                              bool do_gc) {
   // r0: result parameter for PerformGC, if any
   // r4: number of arguments including receiver  (C callee-saved)
   // r5: pointer to builtin function  (C callee-saved)
+  // r6: pointer to the first argument (C callee-saved)
 
   if (do_gc) {
-    __ Call(FUNCTION_ADDR(Runtime::PerformGC), runtime_entry);  // passing r0
+    // Passing r0.
+    __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
   }
 
   // Call C built-in.
-  // r0 = argc.
+  // r0 = argc, r1 = argv
   __ mov(r0, Operand(r4));
-  // r1 = argv.
-  __ add(r1, fp, Operand(r4, LSL, kPointerSizeLog2));
-  __ add(r1, r1, Operand(ExitFrameConstants::kPPDisplacement - kPointerSize));
+  __ mov(r1, Operand(r6));
 
   // TODO(1242173): To let the GC traverse the return address of the exit
   // frames, we need to know where the return address is. Right now,
@@ -1671,32 +1671,12 @@
   __ tst(r2, Operand(kFailureTagMask));
   __ b(eq, &failure_returned);
 
-  // clear top frame
-  __ mov(r3, Operand(0));
-  __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
-  __ str(r3, MemOperand(ip));
-
-  // Restore the memory copy of the registers by digging them out from
-  // the stack.
-  if (do_restore) {
-    // Ok to clobber r2 and r3.
-    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
-    const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
-    __ add(r3, fp, Operand(kOffset));
-    __ CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
-  }
-
-  // Exit C frame and return
+  // Exit C frame and return.
   // r0:r1: result
   // sp: stack pointer
   // fp: frame pointer
   // pp: caller's parameter pointer pp  (restored as C callee-saved)
-
-  // Restore current context from top and clear it in debug mode.
-  __ mov(r3, Operand(Top::context_address()));
-  __ ldr(cp, MemOperand(r3));
-  __ mov(sp, Operand(fp));  // respect ABI stack constraint
-  __ ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
+  __ LeaveExitFrame(frame_type);
 
   // check if we should retry or throw exception
   Label retry;
@@ -1744,47 +1724,16 @@
   // this by performing a garbage collection and retrying the
   // builtin once.
 
-  // Enter C frame
-  // Compute parameter pointer before making changes and save it as ip register
-  // so that it is restored as sp register on exit, thereby popping the args.
-  // ip = sp + kPointerSize*args_len;
-  __ add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
+  StackFrame::Type frame_type = is_debug_break
+      ? StackFrame::EXIT_DEBUG
+      : StackFrame::EXIT;
 
-  // push in reverse order:
-  // caller_fp, sp_on_exit, caller_pc
-  __ stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
-  __ mov(fp, Operand(sp));  // setup new frame pointer
+  // Enter the exit frame that transitions from JavaScript to C++.
+  __ EnterExitFrame(frame_type);
 
-  // Store the current context in top.
-  __ mov(ip, Operand(ExternalReference(Top::k_context_address)));
-  __ str(cp, MemOperand(ip));
-
-  // remember top frame
-  __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
-  __ str(fp, MemOperand(ip));
-
-  // Push debug marker.
-  __ mov(ip, Operand(is_debug_break ? 1 : 0));
-  __ push(ip);
-
-  if (is_debug_break) {
-    // Save the state of all registers to the stack from the memory location.
-    // Use sp as base to push.
-    __ CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
-  }
-
-  // move number of arguments (argc) into callee-saved register
-  __ mov(r4, Operand(r0));
-
-  // move pointer to builtin function into callee-saved register
-  __ mov(r5, Operand(r1));
-
-  // r0: result parameter for PerformGC, if any (setup below)
-  // r4: number of arguments
-  // r5: pointer to builtin function  (C callee-saved)
-
-  Label entry;
-  __ bind(&entry);
+  // r4: number of arguments (C callee-saved)
+  // r5: pointer to builtin function (C callee-saved)
+  // r6: pointer to first argument (C callee-saved)
 
   Label throw_out_of_memory_exception;
   Label throw_normal_exception;
@@ -1797,20 +1746,20 @@
   GenerateCore(masm,
                &throw_normal_exception,
                &throw_out_of_memory_exception,
-               FLAG_gc_greedy,
-               is_debug_break);
+               frame_type,
+               FLAG_gc_greedy);
 #else
   GenerateCore(masm,
                &throw_normal_exception,
                &throw_out_of_memory_exception,
-               false,
-               is_debug_break);
+               frame_type,
+               false);
 #endif
   GenerateCore(masm,
                &throw_normal_exception,
                &throw_out_of_memory_exception,
-               true,
-               is_debug_break);
+               frame_type,
+               true);
 
   __ bind(&throw_out_of_memory_exception);
   GenerateThrowOutOfMemory(masm);
@@ -2066,9 +2015,9 @@
     Variable* var = ref()->expression()->AsVariableProxy()->AsVariable();
     if (var != NULL) {
       ASSERT(var->is_global());
-      __ Call(ic, code_target_context);
+      __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
     } else {
-      __ Call(ic, code_target);
+      __ Call(ic, RelocInfo::CODE_TARGET);
     }
 
   } else {
@@ -2099,7 +2048,7 @@
     // Setup the name register.
     masm->mov(r2, Operand(name));
     Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
-    masm->Call(ic, code_target);
+    masm->Call(ic, RelocInfo::CODE_TARGET);
 
   } else {
     // Access keyed property.
@@ -3705,7 +3654,7 @@
     // Setup the receiver register and call the IC initialization code.
     Handle<Code> stub = ComputeCallInitialize(args->length());
     __ RecordPosition(node->position());
-    __ Call(stub, code_target_context);
+    __ Call(stub, RelocInfo::CODE_TARGET_CONTEXT);
     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     // Remove the function from the stack.
     __ pop();
@@ -3752,7 +3701,7 @@
       // Set the receiver register and call the IC initialization code.
       Handle<Code> stub = ComputeCallInitialize(args->length());
       __ RecordPosition(node->position());
-      __ Call(stub, code_target);
+      __ Call(stub, RelocInfo::CODE_TARGET);
       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 
       // Remove the function from the stack.
@@ -3819,9 +3768,9 @@
 
   // Call the construct call builtin that handles allocation and
   // constructor invocation.
-  __ RecordPosition(position);
+  __ RecordPosition(RelocInfo::POSITION);
   __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-          js_construct_call);
+          RelocInfo::CONSTRUCT_CALL);
 
   // Discard old TOS value and push r0 on the stack (same as Pop(), push(r0)).
   __ str(r0, MemOperand(sp, 0 * kPointerSize));
@@ -3996,7 +3945,7 @@
 
     // Call the JS runtime function.
     Handle<Code> stub = ComputeCallInitialize(args->length());
-    __ Call(stub, code_target);
+    __ Call(stub, RelocInfo::CODE_TARGET);
     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     __ pop();
     __ push(r0);
@@ -4534,7 +4483,7 @@
 void ArmCodeGenerator::RecordStatementPosition(Node* node) {
   if (FLAG_debug_info) {
     int statement_pos = node->statement_pos();
-    if (statement_pos == kNoPosition) return;
+    if (statement_pos == RelocInfo::kNoPosition) return;
     __ RecordStatementPosition(statement_pos);
   }
 }
diff --git a/src/codegen-ia32.cc b/src/codegen-ia32.cc
index b6c3ed9..1c6fc5b 100644
--- a/src/codegen-ia32.cc
+++ b/src/codegen-ia32.cc
@@ -314,6 +314,40 @@
   NODE_LIST(DEF_VISIT)
 #undef DEF_VISIT
 
+  // Only allow fast-case switch if the range of labels is at most
+  // this factor times the number of case labels.
+  // Value is derived from comparing the size of code generated by the normal
+  // switch code for Smi-labels to the size of a single pointer. If code
+  // quality increases, this number should be decreased to match.
+  static const int kFastSwitchMaxOverheadFactor = 5;
+
+  // Minimum number of switch cases required before we allow jump-table
+  // optimization.
+  static const int kFastSwitchMinCaseCount = 5;
+
+  // Create fast switch implementation if all labels are small integers
+  // in a limited range. Returns false if this is not the case, and no
+  // code has been generated (i.e., the default implementation should be used).
+  bool TryFastCaseSwitchStatement(SwitchStatement *switchStmt);
+
+  // Generate a computed jump with an empty jump table.
+  // Binds a label to the start of the jump table. This table must
+  // be populated later when the addresses of the targets are known.
+  // Used by GenerateFastCaseSwitchStatement.
+  void GenerateFastCaseSwitchJumpTable(
+      int min_index, int range, Label *fail_label, Label &table_start);
+
+  // Populate an empty jump table with the addresses of bound labels.
+  // Used by GenerateFastCaseSwitchStatement.
+  void PopulateFastCaseSwitchJumpTable(
+      Label &table_start, SmartPointer<Label*> &case_targets, int table_size);
+
+  // Generates a fast-case switch statement for a switch with all-Smi labels
+  // in a limited range.
+  // Used by TryFastCaseSwitchStatement.
+  void GenerateFastCaseSwitchStatement(
+      SwitchStatement *node, int min_index, int range, int default_index);
+
   void RecordStatementPosition(Node* node);
 
   // Activation frames.
@@ -844,7 +878,7 @@
     Literal key(variable->name());
     // TODO(1241834): Fetch the position from the variable instead of using
     // no position.
-    Property property(&global, &key, kNoPosition);
+    Property property(&global, &key, RelocInfo::kNoPosition);
     Load(&property);
   } else {
     Load(x, CodeGenState::LOAD_TYPEOF_EXPR);
@@ -1170,9 +1204,9 @@
     __ Set(ecx, Immediate(name));
     if (var != NULL) {
       ASSERT(var->is_global());
-      __ call(ic, code_target_context);
+      __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
     } else {
-      __ call(ic, code_target);
+      __ call(ic, RelocInfo::CODE_TARGET);
     }
   } else {
     // Access keyed property.
@@ -1183,9 +1217,9 @@
     Variable* var = ref()->expression()->AsVariableProxy()->AsVariable();
     if (var != NULL) {
       ASSERT(var->is_global());
-      __ call(ic, code_target_context);
+      __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
     } else {
-      __ call(ic, code_target);
+      __ call(ic, RelocInfo::CODE_TARGET);
     }
   }
   __ push(eax);  // IC call leaves result in eax, push it out
@@ -1209,7 +1243,7 @@
     masm->pop(eax);
     // Setup the name register.
     masm->Set(ecx, Immediate(name));
-    masm->call(ic, code_target);
+    masm->call(ic, RelocInfo::CODE_TARGET);
   } else {
     // Access keyed property.
     ASSERT(type == Reference::KEYED);
@@ -1218,7 +1252,7 @@
     Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
     // TODO(1222589): Make the IC grab the values from the stack.
     masm->pop(eax);
-    masm->call(ic, code_target);
+    masm->call(ic, RelocInfo::CODE_TARGET);
   }
   masm->push(eax);  // IC call leaves result in eax, push it out
 }
@@ -2594,7 +2628,7 @@
   masm->Set(ebx, Immediate(0));
   masm->GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
   Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
-  masm->jmp(adaptor, code_target);
+  masm->jmp(adaptor, RelocInfo::CODE_TARGET);
 }
 
 
@@ -2626,37 +2660,23 @@
 }
 
 
-class StackCheckDeferred: public DeferredCode {
- public:
-  explicit StackCheckDeferred(CodeGenerator* generator)
-      : DeferredCode(generator) {
-    set_comment("[ StackCheckDeferred");
-  }
-  virtual void Generate();
-};
-
-
-void StackCheckDeferred::Generate() {
-  StackCheckStub stub;
-  __ CallStub(&stub);
-}
-
-
 void Ia32CodeGenerator::CheckStack() {
   if (FLAG_check_stack) {
-    StackCheckDeferred* deferred = new StackCheckDeferred(this);
+    Label stack_is_ok;
+    StackCheckStub stub;
     ExternalReference stack_guard_limit =
         ExternalReference::address_of_stack_guard_limit();
     __ cmp(esp, Operand::StaticVariable(stack_guard_limit));
-    __ j(below, deferred->enter(), not_taken);
-    __ bind(deferred->exit());
+    __ j(above_equal, &stack_is_ok, taken);
+    __ CallStub(&stub);
+    __ bind(&stack_is_ok);
   }
 }
 
 
 void Ia32CodeGenerator::VisitBlock(Block* node) {
   Comment cmnt(masm_, "[ Block");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   node->set_break_stack_height(break_stack_height_);
   VisitStatements(node->statements());
   __ bind(node->break_target());
@@ -2734,7 +2754,7 @@
 
 void Ia32CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
   Comment cmnt(masm_, "[ ExpressionStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   Expression* expression = node->expression();
   expression->MarkAsStatement();
   Load(expression);
@@ -2755,7 +2775,7 @@
   bool has_then_stm = node->HasThenStatement();
   bool has_else_stm = node->HasElseStatement();
 
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   Label exit;
   if (has_then_stm && has_else_stm) {
     Label then;
@@ -2819,7 +2839,7 @@
 
 void Ia32CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
   Comment cmnt(masm_, "[ ContinueStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   CleanStack(break_stack_height_ - node->target()->break_stack_height());
   __ jmp(node->target()->continue_target());
 }
@@ -2827,7 +2847,7 @@
 
 void Ia32CodeGenerator::VisitBreakStatement(BreakStatement* node) {
   Comment cmnt(masm_, "[ BreakStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   CleanStack(break_stack_height_ - node->target()->break_stack_height());
   __ jmp(node->target()->break_target());
 }
@@ -2835,7 +2855,7 @@
 
 void Ia32CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
   Comment cmnt(masm_, "[ ReturnStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   Load(node->expression());
 
   // Move the function result into eax
@@ -2873,7 +2893,7 @@
 
 void Ia32CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
   Comment cmnt(masm_, "[ WithEnterStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   Load(node->expression());
   __ CallRuntime(Runtime::kPushContext, 1);
 
@@ -2900,20 +2920,170 @@
 }
 
 
+// Generate a computed jump with an empty jump table.
+// Binds table_start to the start of the jump table. The table must
+// be populated later when the addresses of the targets are known.
+void Ia32CodeGenerator::GenerateFastCaseSwitchJumpTable(
+  int min_index, int range, Label *fail_label, Label &table_start) {
+  // Notice: Internal references, used by both the jmp instruction and
+  // the table entries, need to be relocated if the buffer grows. This
+  // prevents the forward use of Labels, since a displacement cannot
+  // survive relocation, and it also cannot safely be distinguished
+  // from a real address.  Instead we put in zero-values as
+// placeholders, and fill in the addresses after the labels have been
+  // bound.
+
+  __ pop(eax);  // supposedly a smi
+  // Check range of value; if outside [0..range-1], jump to default/end label.
+  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+  if (min_index != 0) {
+    __ sub(Operand(eax), Immediate(min_index * 2));  // smi subtraction
+  }
+  __ test(eax, Immediate(0x80000000 | kSmiTagMask));  // negative or not smi
+  __ j(not_equal, fail_label, not_taken);
+  __ cmp(eax, range * 2);
+  __ j(greater_equal, fail_label, not_taken);
+
+  // 0 is placeholder.
+  __ jmp(Operand(eax, times_2, 0x0, RelocInfo::INTERNAL_REFERENCE));
+  // calculate address to overwrite later with actual address of table.
+  int32_t jump_table_ref = __ pc_offset() - sizeof(int32_t);
+
+  __ Align(4);
+  __ bind(&table_start);
+  __ WriteInternalReference(jump_table_ref, table_start);
+
+  for (int i = 0; i < range; i++) {
+    __ dd(0x0, RelocInfo::INTERNAL_REFERENCE);  // table entry, 0 is placeholder
+  }
+}
+
+
+// Populate an empty jump table with the addresses of bound labels.
+void Ia32CodeGenerator::PopulateFastCaseSwitchJumpTable(
+    Label &table_start, SmartPointer<Label*> &case_targets, int table_size) {
+  for (int i = 0; i < table_size; i++) {
+    int table_entry_pos = table_start.pos() + i * sizeof(uint32_t);
+    __ WriteInternalReference(table_entry_pos, *case_targets[i]);
+  }
+}
+
+
+// Generates a fast-case switch statement for a switch with all-Smi labels
+// in a limited range.
+void Ia32CodeGenerator::GenerateFastCaseSwitchStatement(
+    SwitchStatement *node, int min_index, int range, int default_index) {
+  ZoneList<CaseClause*>* cases = node->cases();
+  int length = cases->length();
+
+  SmartPointer<Label*> case_targets(NewArray<Label*>(range));
+  SmartPointer<Label> case_labels(NewArray<Label>(length));
+
+  Label* fail_label = (default_index >= 0 ? &(case_labels[default_index])
+                                          : node->break_target());
+
+  // Create array of labels to jump to by index and set default jump
+  // targets everywhere.
+  for (int i = 0; i < range; i++) {
+    // Every entry starts out pointing at the fail label.
+    case_targets[i] = fail_label;
+  }
+
+  // Overwrite the defaults with the targets of the actual cases (in reverse
+  // order, so that if the same label occurs twice, the first one wins).
+  for (int i = length-1; i >= 0 ; i--) {
+    CaseClause* clause = cases->at(i);
+    if (!clause->is_default()) {
+      Object* label_value = *(clause->label()->AsLiteral()->handle());
+      int case_value = Smi::cast(label_value)->value();
+      case_targets[case_value - min_index] = &(case_labels[i]);
+    }
+  }
+
+  // Generate the jump table and code for all cases.
+  Label table_start;
+
+  GenerateFastCaseSwitchJumpTable(min_index, range, fail_label, table_start);
+
+  for (int i = 0; i < length; i++) {
+    Comment cmnt(masm_, "[ case clause");
+    __ bind(&(case_labels[i]));
+    VisitStatements(cases->at(i)->statements());
+  }
+
+  __ bind(node->break_target());
+
+  // All labels bound now, so we can populate the table with the
+  // correct addresses.
+  PopulateFastCaseSwitchJumpTable(table_start, case_targets, range);
+}
+
+
+bool Ia32CodeGenerator::TryFastCaseSwitchStatement(SwitchStatement *node) {
+  ZoneList<CaseClause*>* cases = node->cases();
+  int length = cases->length();
+
+  if (length < kFastSwitchMinCaseCount) {
+    return false;
+  }
+
+  // Test whether fast-case should be used.
+  int default_index = -1;
+  int min_index = Smi::kMaxValue;
+  int max_index = Smi::kMinValue;
+  for (int i = 0; i < length; i++) {
+    CaseClause* clause = cases->at(i);
+    if (clause->is_default()) {
+      if (default_index >= 0) {
+        return false;  // More than one default label:
+                       // Defer to normal case for error.
+      }
+      default_index = i;
+    } else {
+      Expression* label = clause->label();
+      Literal* literal = label->AsLiteral();
+      if (literal == NULL) {
+        return false;  // fail fast case
+      }
+      Object* value = *(literal->handle());
+      if (!value->IsSmi()) {
+        return false;
+      }
+      int smi = Smi::cast(value)->value();
+      if (smi < min_index) { min_index = smi; }
+      if (smi > max_index) { max_index = smi; }
+    }
+  }
+
+  // After this all labels are smis.
+  int range = max_index - min_index + 1;  // |min..max| inclusive
+  if (range / kFastSwitchMaxOverheadFactor > length) {
+    return false;  // range of labels is too sparse
+  }
+
+  // Optimization accepted, generate code.
+  GenerateFastCaseSwitchStatement(node, min_index, range, default_index);
+  return true;
+}
+
+
 void Ia32CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
   Comment cmnt(masm_, "[ SwitchStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   node->set_break_stack_height(break_stack_height_);
 
   Load(node->tag());
 
+  if (TryFastCaseSwitchStatement(node)) {
+    return;
+  }
+
   Label next, fall_through, default_case;
   ZoneList<CaseClause*>* cases = node->cases();
   int length = cases->length();
 
   for (int i = 0; i < length; i++) {
     CaseClause* clause = cases->at(i);
-
     Comment cmnt(masm_, "[ case clause");
 
     if (clause->is_default()) {
@@ -2966,7 +3136,7 @@
 
 void Ia32CodeGenerator::VisitLoopStatement(LoopStatement* node) {
   Comment cmnt(masm_, "[ LoopStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   node->set_break_stack_height(break_stack_height_);
 
   // simple condition analysis
@@ -3007,7 +3177,8 @@
     // Record source position of the statement as this code which is after the
     // code for the body actually belongs to the loop statement and not the
     // body.
-    if (FLAG_debug_info) __ RecordPosition(node->statement_pos());
+    RecordStatementPosition(node);
+    __ RecordPosition(node->statement_pos());
     ASSERT(node->type() == LoopStatement::FOR_LOOP);
     Visit(node->next());
   }
@@ -3034,7 +3205,7 @@
 
 void Ia32CodeGenerator::VisitForInStatement(ForInStatement* node) {
   Comment cmnt(masm_, "[ ForInStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
 
   // We keep stuff on the stack while the body is executing.
   // Record it, so that a break/continue crossing this statement
@@ -3446,7 +3617,7 @@
 
 void Ia32CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
   Comment cmnt(masm_, "[ DebuggerStatement");
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   __ CallRuntime(Runtime::kDebugBreak, 1);
   __ push(eax);
 }
@@ -3640,7 +3811,8 @@
 // therefore context dependent.
 class ObjectLiteralDeferred: public DeferredCode {
  public:
-  ObjectLiteralDeferred(CodeGenerator* generator, ObjectLiteral* node)
+  ObjectLiteralDeferred(CodeGenerator* generator,
+                        ObjectLiteral* node)
       : DeferredCode(generator), node_(node) {
     set_comment("[ ObjectLiteralDeferred");
   }
@@ -3667,7 +3839,6 @@
 
 void Ia32CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
   Comment cmnt(masm_, "[ ObjectLiteral");
-
   ObjectLiteralDeferred* deferred = new ObjectLiteralDeferred(this, node);
 
   // Retrieve the literal array and check the allocated entry.
@@ -3696,6 +3867,7 @@
   // Push the new cloned literal object as the result.
   __ push(eax);
 
+
   for (int i = 0; i < node->properties()->length(); i++) {
     ObjectLiteral::Property* property  = node->properties()->at(i);
     switch (property->kind()) {
@@ -3709,7 +3881,7 @@
           Load(property->value());
           __ pop(eax);
           __ Set(ecx, Immediate(key));
-          __ call(ic, code_target);
+          __ call(ic, RelocInfo::CODE_TARGET);
           __ add(Operand(esp), Immediate(kPointerSize));
           // Ignore result.
           break;
@@ -3809,7 +3981,7 @@
 void Ia32CodeGenerator::VisitAssignment(Assignment* node) {
   Comment cmnt(masm_, "[ Assignment");
 
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
   Reference target(this, node->target());
   if (target.is_illegal()) return;
 
@@ -3877,7 +4049,7 @@
 
   ZoneList<Expression*>* args = node->arguments();
 
-  if (FLAG_debug_info) RecordStatementPosition(node);
+  RecordStatementPosition(node);
 
   // Check if the function is a variable or a property.
   Expression* function = node->expression();
@@ -3910,7 +4082,7 @@
     // Setup the receiver register and call the IC initialization code.
     Handle<Code> stub = ComputeCallInitialize(args->length());
     __ RecordPosition(node->position());
-    __ call(stub, code_target_context);
+    __ call(stub, RelocInfo::CODE_TARGET_CONTEXT);
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
 
     // Overwrite the function on the stack with the result.
@@ -3954,7 +4126,7 @@
       // Call the IC initialization code.
       Handle<Code> stub = ComputeCallInitialize(args->length());
       __ RecordPosition(node->position());
-      __ call(stub, code_target);
+      __ call(stub, RelocInfo::CODE_TARGET);
       __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
 
       // Overwrite the function on the stack with the result.
@@ -4024,7 +4196,7 @@
   // constructor invocation.
   __ RecordPosition(node->position());
   __ call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
-          js_construct_call);
+          RelocInfo::CONSTRUCT_CALL);
   __ mov(TOS, eax);  // discard the function and "push" the newly created object
 }
 
@@ -4319,7 +4491,7 @@
     // Call the JS runtime function.
     Handle<Code> stub = ComputeCallInitialize(args->length());
     __ Set(eax, Immediate(args->length()));
-    __ call(stub, code_target);
+    __ call(stub, RelocInfo::CODE_TARGET);
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
     __ mov(TOS, eax);
   }
@@ -4979,7 +5151,7 @@
 void Ia32CodeGenerator::RecordStatementPosition(Node* node) {
   if (FLAG_debug_info) {
     int pos = node->statement_pos();
-    if (pos != kNoPosition) {
+    if (pos != RelocInfo::kNoPosition) {
       __ RecordStatementPosition(pos);
     }
   }
@@ -5019,19 +5191,6 @@
 #define __  masm->
 
 
-void CEntryStub::GenerateReserveCParameterSpace(MacroAssembler* masm,
-                                                int num_parameters) {
-  if (num_parameters > 0) {
-    __ sub(Operand(esp), Immediate(num_parameters * kPointerSize));
-  }
-  // OS X activation frames are 16 byte-aligned
-  // (see "Mac OS X ABI Function Call Guide").
-  const int kFrameAlignment = 16;
-  ASSERT(IsPowerOf2(kFrameAlignment));
-  __ and_(esp, -kFrameAlignment);
-}
-
-
 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
   ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
   ExternalReference handler_address(Top::k_handler_address);
@@ -5060,26 +5219,23 @@
 void CEntryStub::GenerateCore(MacroAssembler* masm,
                               Label* throw_normal_exception,
                               Label* throw_out_of_memory_exception,
-                              bool do_gc,
-                              bool do_restore) {
+                              StackFrame::Type frame_type,
+                              bool do_gc) {
   // eax: result parameter for PerformGC, if any
   // ebx: pointer to C function  (C callee-saved)
   // ebp: frame pointer  (restored after C call)
   // esp: stack pointer  (restored after C call)
   // edi: number of arguments including receiver  (C callee-saved)
+  // esi: pointer to the first argument (C callee-saved)
 
   if (do_gc) {
     __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
-    __ call(FUNCTION_ADDR(Runtime::PerformGC), runtime_entry);
+    __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
   }
 
   // Call C function.
-  __ lea(eax, Operand(ebp,
-                      edi,
-                      times_4,
-                      StandardFrameConstants::kCallerSPOffset - kPointerSize));
   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
-  __ mov(Operand(esp, 1 * kPointerSize), eax);  // argv.
+  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
   __ call(Operand(ebx));
   // Result is in eax or edx:eax - do not destroy these registers!
 
@@ -5091,41 +5247,11 @@
   __ test(ecx, Immediate(kFailureTagMask));
   __ j(zero, &failure_returned, not_taken);
 
-  // Restore number of arguments to ecx and clear top frame.
-  __ mov(ecx, Operand(edi));
-  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
-  __ mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
-
-  // Restore the memory copy of the registers by digging them out from
-  // the stack.
-  if (do_restore) {
-    // Ok to clobber ebx and edi - function pointer and number of arguments not
-    // needed anymore.
-    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
-    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
-    __ lea(ebx, Operand(ebp, kOffset));
-    __ CopyRegistersFromStackToMemory(ebx, edi, kJSCallerSaved);
-  }
-
-  // Exit C frame.
-  __ lea(esp, Operand(ebp, -1 * kPointerSize));
-  __ pop(ebx);
-  __ pop(ebp);
-
-  // Restore current context from top and clear it in debug mode.
-  ExternalReference context_address(Top::k_context_address);
-  __ mov(esi, Operand::StaticVariable(context_address));
-  if (kDebug) {
-    __ mov(Operand::StaticVariable(context_address), Immediate(0));
-  }
-
-  // Pop arguments from caller's stack and return.
-  __ pop(ebx);  // Ok to clobber ebx - function pointer not needed anymore.
-  __ lea(esp, Operand(esp, ecx, times_4, 0));
-  __ push(ebx);
+  // Exit the JavaScript to C++ exit frame.
+  __ LeaveExitFrame(frame_type);
   __ ret(0);
 
-  // Handling of Failure.
+  // Handling of failure.
   __ bind(&failure_returned);
 
   Label retry;
@@ -5222,55 +5348,19 @@
   // this by performing a garbage collection and retrying the
   // builtin once.
 
-  // Enter C frame.
-  // Here we make the following assumptions and use them when setting
-  // up the top-most Frame. Adjust the code if these assumptions
-  // change.
-  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
-  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
-  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
-  ASSERT(ExitFrameConstants::kSPOffset  == -2 * kPointerSize);
-  __ push(ebp);  // caller fp
-  __ mov(ebp, Operand(esp));  // C entry fp
-  __ push(ebx);  // C function
-  __ push(Immediate(0));  // saved entry sp, set before call
-  __ push(Immediate(is_debug_break ? 1 : 0));
+  StackFrame::Type frame_type = is_debug_break ?
+      StackFrame::EXIT_DEBUG :
+      StackFrame::EXIT;
 
-  // Remember top frame.
-  ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
-  ExternalReference context_address(Top::k_context_address);
-  __ mov(Operand::StaticVariable(c_entry_fp), ebp);
-  __ mov(Operand::StaticVariable(context_address), esi);
-
-  if (is_debug_break) {
-    // Save the state of all registers to the stack from the memory
-    // location.
-
-    // TODO(1243899): This should be symmetric to
-    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
-    // correct here, but computed for the other call. Very error
-    // prone! FIX THIS.  Actually there are deeper problems with
-    // register saving than this asymmetry (see the bug report
-    // associated with this issue).
-    __ PushRegistersFromMemory(kJSCallerSaved);
-  }
-
-  // Move number of arguments (argc) into callee-saved register. Note
-  // that edi is only available after remembering the top frame.
-  __ mov(edi, Operand(eax));
-
-  // Allocate stack space for 2 arguments (argc, argv).
-  GenerateReserveCParameterSpace(masm, 2);
-  __ mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);  // save entry sp
+  // Enter the exit frame that transitions from JavaScript to C++.
+  __ EnterExitFrame(frame_type);
 
   // eax: result parameter for PerformGC, if any (setup below)
   // ebx: pointer to builtin function  (C callee-saved)
   // ebp: frame pointer  (restored after C call)
   // esp: stack pointer  (restored after C call)
   // edi: number of arguments including receiver (C callee-saved)
-
-  Label entry;
-  __ bind(&entry);
+  // esi: argv pointer (C callee-saved)
 
   Label throw_out_of_memory_exception;
   Label throw_normal_exception;
@@ -5282,20 +5372,21 @@
   }
   GenerateCore(masm, &throw_normal_exception,
                &throw_out_of_memory_exception,
-               FLAG_gc_greedy,
-               is_debug_break);
+               frame_type,
+               FLAG_gc_greedy);
 #else
   GenerateCore(masm,
                &throw_normal_exception,
                &throw_out_of_memory_exception,
-               false,
-               is_debug_break);
+               frame_type,
+               false);
 #endif
+
   GenerateCore(masm,
                &throw_normal_exception,
                &throw_out_of_memory_exception,
-               true,
-               is_debug_break);
+               frame_type,
+               true);
 
   __ bind(&throw_out_of_memory_exception);
   GenerateThrowOutOfMemory(masm);
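
The fast-case switch in TryFastCaseSwitchStatement above only pays off when the Smi labels are dense enough relative to the number of cases. A standalone sketch of that acceptance test (illustration only, with the case values pulled into a plain vector instead of the AST; the constants mirror the ones declared in the header):

#include <algorithm>
#include <vector>

static const int kFastSwitchMaxOverheadFactor = 5;
static const int kFastSwitchMinCaseCount = 5;

// Returns true when a jump table is expected to be smaller than the code
// generated by the generic compare-and-branch switch.
bool QualifiesForJumpTable(const std::vector<int>& smi_labels) {
  int length = static_cast<int>(smi_labels.size());
  if (length < kFastSwitchMinCaseCount) return false;
  int min_index = *std::min_element(smi_labels.begin(), smi_labels.end());
  int max_index = *std::max_element(smi_labels.begin(), smi_labels.end());
  int range = max_index - min_index + 1;  // |min..max| inclusive
  // Reject sparse label sets: the table entries would outweigh the savings.
  return range / kFastSwitchMaxOverheadFactor <= length;
}

// Example: labels {1, 2, 3, 5, 8} give range 8 with 5 cases and qualify;
// labels {0, 10, 100, 1000, 10000} give range 10001 and are rejected.
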
diff --git a/src/codegen.cc b/src/codegen.cc
index a7123b0..8eb1a04 100644
--- a/src/codegen.cc
+++ b/src/codegen.cc
@@ -37,8 +37,8 @@
 DeferredCode::DeferredCode(CodeGenerator* generator)
   : masm_(generator->masm()),
     generator_(generator),
-    position_(masm_->last_position()),
-    position_is_statement_(masm_->last_position_is_statement()) {
+    statement_position_(masm_->last_statement_position()),
+    position_(masm_->last_position()) {
   generator->AddDeferred(this);
 #ifdef DEBUG
   comment_ = "";
@@ -51,9 +51,10 @@
     DeferredCode* code = deferred_.RemoveLast();
     MacroAssembler* masm = code->masm();
     // Record position of deferred code stub.
-    if (code->position_is_statement()) {
-      masm->RecordStatementPosition(code->position());
-    } else {
+    if (code->statement_position() != RelocInfo::kNoPosition) {
+      masm->RecordStatementPosition(code->statement_position());
+    }
+    if (code->position() != RelocInfo::kNoPosition) {
       masm->RecordPosition(code->position());
     }
     // Bind labels and generate the code.
diff --git a/src/codegen.h b/src/codegen.h
index 43d780e..3ec6b47 100644
--- a/src/codegen.h
+++ b/src/codegen.h
@@ -63,8 +63,8 @@
   Label* enter() { return &enter_; }
   Label* exit() { return &exit_; }
 
+  int statement_position() const { return statement_position_; }
   int position() const { return position_; }
-  bool position_is_statement() const { return position_is_statement_; }
 
 #ifdef DEBUG
   void set_comment(const char* comment) { comment_ = comment; }
@@ -84,8 +84,8 @@
   CodeGenerator* const generator_;
   Label enter_;
   Label exit_;
+  int statement_position_;
   int position_;
-  bool position_is_statement_;
 #ifdef DEBUG
   const char* comment_;
 #endif
@@ -248,10 +248,10 @@
   void GenerateCore(MacroAssembler* masm,
                     Label* throw_normal_exception,
                     Label* throw_out_of_memory_exception,
-                    bool do_gc, bool do_restore);
+                    StackFrame::Type frame_type,
+                    bool do_gc);
   void GenerateThrowTOS(MacroAssembler* masm);
   void GenerateThrowOutOfMemory(MacroAssembler* masm);
-  void GenerateReserveCParameterSpace(MacroAssembler* masm, int num_parameters);
 
  private:
   Major MajorKey() { return CEntry; }
@@ -274,7 +274,6 @@
 };
 
 
-
 class JSEntryStub : public CodeStub {
  public:
   JSEntryStub() { }
diff --git a/src/compiler.cc b/src/compiler.cc
index 4a6d4a4..999cf4c 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -130,7 +130,7 @@
                                       code);
 
   CodeGenerator::SetFunctionInfo(fun, lit->scope()->num_parameters(),
-                                 kNoPosition,
+                                 RelocInfo::kNoPosition,
                                  lit->start_position(), lit->end_position(),
                                  lit->is_expression(), true, script);
 
@@ -202,8 +202,9 @@
 }
 
 
-Handle<JSFunction> Compiler::CompileEval(bool is_global,
-                                         Handle<String> source) {
+Handle<JSFunction> Compiler::CompileEval(Handle<String> source,
+                                         int line_offset,
+                                         bool is_global) {
   Counters::total_eval_size.Increment(source->length());
   Counters::total_compile_size.Increment(source->length());
 
@@ -219,6 +220,7 @@
   if (result.is_null()) {
     // Create a script object describing the script to be compiled.
     Handle<Script> script = Factory::NewScript(source);
+    script->set_line_offset(Smi::FromInt(line_offset));
     result = MakeFunction(is_global, true, script, NULL, NULL);
     if (!result.is_null()) {
       CompilationCache::Associate(source, entry, result);
diff --git a/src/compiler.h b/src/compiler.h
index 1f37ad6..3bbc9aa 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -57,7 +57,9 @@
                                     ScriptDataImpl* script_Data);
 
   // Compile a String source within a context for Eval.
-  static Handle<JSFunction> CompileEval(bool is_global, Handle<String> source);
+  static Handle<JSFunction> CompileEval(Handle<String> source,
+                                        int line_offset,
+                                        bool is_global);
 
   // Compile from function info (used for lazy compilation). Returns
   // true on success and false if the compilation resulted in a stack
diff --git a/src/contexts.h b/src/contexts.h
index f0869ac..00ad7b4 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -91,7 +91,8 @@
   V(EMPTY_SCRIPT_INDEX, Script, empty_script) \
   V(SCRIPT_FUNCTION_INDEX, JSFunction, script_function) \
   V(CONTEXT_EXTENSION_FUNCTION_INDEX, JSFunction, context_extension_function) \
-  V(OUT_OF_MEMORY_INDEX, Object, out_of_memory)
+  V(OUT_OF_MEMORY_INDEX, Object, out_of_memory) \
+  V(MAP_CACHE_INDEX, Object, map_cache)
 
 // JSFunctions are pairs (context, function code), sometimes also called
 // closures. A Context object is used to represent function contexts and
@@ -207,6 +208,7 @@
     SCRIPT_FUNCTION_INDEX,
     CONTEXT_EXTENSION_FUNCTION_INDEX,
     OUT_OF_MEMORY_INDEX,
+    MAP_CACHE_INDEX,
     GLOBAL_CONTEXT_SLOTS
   };
 
diff --git a/src/debug-delay.js b/src/debug-delay.js
index 9ca5b0b..94b4cfc 100644
--- a/src/debug-delay.js
+++ b/src/debug-delay.js
@@ -1230,7 +1230,7 @@
 DebugCommandProcessor.prototype.responseToText = function(json_response) {
   try {
     // Convert the JSON string to an object.
-    response = %CompileString('(' + json_response + ')', false)();
+    response = %CompileString('(' + json_response + ')', 0, false)();
 
     if (!response.success) {
       return response.message;
@@ -1436,7 +1436,7 @@
   try {
     try {
       // Convert the JSON string to an object.
-      request = %CompileString('(' + json_request + ')', false)();
+      request = %CompileString('(' + json_request + ')', 0, false)();
 
       // Create an initial response.
       response = this.createResponse(request);
@@ -1889,7 +1889,7 @@
 DebugCommandProcessor.prototype.isRunning = function(json_response) {
   try {
     // Convert the JSON string to an object.
-    response = %CompileString('(' + json_response + ')', false)();
+    response = %CompileString('(' + json_response + ')', 0, false)();
 
     // Return whether VM should be running after this request.
     return response.running;
diff --git a/src/debug.cc b/src/debug.cc
index 3fc7545..b6925aa 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -94,14 +94,16 @@
     first = false;
     if (RinfoDone()) return;
 
-    // Update the current source position each time a source position is
-    // passed.
-    if (is_position(rmode())) {
-      position_ = rinfo()->data() - debug_info_->shared()->start_position();
-      if (is_statement_position(rmode())) {
+    // Whenever a statement position or (plain) position is passed update the
+    // current value of these.
+    if (RelocInfo::IsPosition(rmode())) {
+      if (RelocInfo::IsStatementPosition(rmode())) {
         statement_position_ =
             rinfo()->data() - debug_info_->shared()->start_position();
       }
+      // Always update the position, as we do not want it to be before the
+      // statement position.
+      position_ = rinfo()->data() - debug_info_->shared()->start_position();
       ASSERT(position_ >= 0);
       ASSERT(statement_position_ >= 0);
     }
@@ -109,10 +111,10 @@
     // Check for breakable code target. Look in the original code as setting
     // break points can cause the code targets in the running (debugged) code to
     // be of a different kind than in the original code.
-    if (is_code_target(rmode())) {
+    if (RelocInfo::IsCodeTarget(rmode())) {
       Address target = original_rinfo()->target_address();
       Code* code = Debug::GetCodeTarget(target);
-      if (code->is_inline_cache_stub() || is_js_construct_call(rmode())) {
+      if (code->is_inline_cache_stub() || RelocInfo::IsConstructCall(rmode())) {
         break_point_++;
         return;
       }
@@ -133,8 +135,7 @@
     }
 
     // Check for break at return.
-    // Currently is_exit_js_frame is used on ARM.
-    if (is_js_return(rmode()) || is_exit_js_frame(rmode())) {
+    if (RelocInfo::IsJSReturn(rmode())) {
       // Set the positions to the end of the function.
       if (debug_info_->shared()->HasSourceCode()) {
         position_ = debug_info_->shared()->end_position() -
@@ -283,7 +284,7 @@
     return;
   }
 
-  if (is_js_return(rmode())) {
+  if (RelocInfo::IsJSReturn(rmode())) {
     // This path is currently only used on IA32 as JSExitFrame on ARM uses a
     // stub.
     // Patch the JS frame exit code with a debug break call. See
@@ -308,7 +309,7 @@
 
 
 void BreakLocationIterator::ClearDebugBreak() {
-  if (is_js_return(rmode())) {
+  if (RelocInfo::IsJSReturn(rmode())) {
     // Restore the JS frame exit code.
     rinfo()->patch_code(original_rinfo()->pc(),
                         Debug::kIa32JSReturnSequenceLength);
@@ -339,15 +340,14 @@
     }
   } else {
     // Step in through constructs call requires no changes to the running code.
-    ASSERT(is_js_construct_call(rmode()));
+    ASSERT(RelocInfo::IsConstructCall(rmode()));
   }
 }
 
 
 // Check whether the break point is at a position which will exit the function.
 bool BreakLocationIterator::IsExit() const {
-  // Currently is_exit_js_frame is used on ARM.
-  return (is_js_return(rmode()) || is_exit_js_frame(rmode()));
+  return (RelocInfo::IsJSReturn(rmode()));
 }
 
 
@@ -358,7 +358,7 @@
 
 // Check whether there is a debug break at the current position.
 bool BreakLocationIterator::IsDebugBreak() {
-  if (is_js_return(rmode())) {
+  if (RelocInfo::IsJSReturn(rmode())) {
     // This is IA32 specific but works as long as the ARM version
     // still uses a stub for JSExitFrame.
     //
@@ -401,7 +401,7 @@
 // Threading support.
 void Debug::ThreadInit() {
   thread_local_.last_step_action_ = StepNone;
-  thread_local_.last_statement_position_ = kNoPosition;
+  thread_local_.last_statement_position_ = RelocInfo::kNoPosition;
   thread_local_.step_count_ = 0;
   thread_local_.last_fp_ = 0;
   thread_local_.step_into_fp_ = 0;
@@ -919,7 +919,7 @@
 
   // Compute whether or not the target is a call target.
   bool is_call_target = false;
-  if (is_code_target(it.rinfo()->rmode())) {
+  if (RelocInfo::IsCodeTarget(it.rinfo()->rmode())) {
     Address target = it.rinfo()->target_address();
     Code* code = Debug::GetCodeTarget(target);
     if (code->is_call_stub()) is_call_target = true;
@@ -935,7 +935,7 @@
       JSFunction* function = JSFunction::cast(frames_it.frame()->function());
       FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared()));
     }
-  } else if (!(is_call_target || is_js_construct_call(it.rmode())) ||
+  } else if (!(is_call_target || RelocInfo::IsConstructCall(it.rmode())) ||
              step_action == StepNext || step_action == StepMin) {
     // Step next or step min.
 
@@ -978,7 +978,7 @@
     int current_statement_position =
         break_location_iterator->code()->SourceStatementPosition(frame->pc());
     return thread_local_.last_fp_ == frame->fp() &&
-           thread_local_.last_statement_position_ == current_statement_position;
+        thread_local_.last_statement_position_ == current_statement_position;
   }
 
   // No step next action - don't continue.
@@ -1015,9 +1015,9 @@
 Handle<Code> Debug::FindDebugBreak(RelocInfo* rinfo) {
   // Find the builtin debug break function matching the calling convention
   // used by the call site.
-  RelocMode mode = rinfo->rmode();
+  RelocInfo::Mode mode = rinfo->rmode();
 
-  if (is_code_target(mode)) {
+  if (RelocInfo::IsCodeTarget(mode)) {
     Address target = rinfo->target_address();
     Code* code = Debug::GetCodeTarget(target);
     if (code->is_inline_cache_stub()) {
@@ -1041,15 +1041,11 @@
         return result;
       }
     }
-    if (is_js_construct_call(mode)) {
+    if (RelocInfo::IsConstructCall(mode)) {
       Handle<Code> result =
           Handle<Code>(Builtins::builtin(Builtins::ConstructCall_DebugBreak));
       return result;
     }
-    // Currently is_exit_js_frame is used on ARM.
-    if (is_exit_js_frame(mode)) {
-      return Handle<Code>(Builtins::builtin(Builtins::Return_DebugBreak));
-    }
     if (code->kind() == Code::STUB) {
       ASSERT(code->major_key() == CodeStub::CallFunction ||
              code->major_key() == CodeStub::StackCheck);
@@ -1130,7 +1126,7 @@
 
 void Debug::ClearStepNext() {
   thread_local_.last_step_action_ = StepNone;
-  thread_local_.last_statement_position_ = kNoPosition;
+  thread_local_.last_statement_position_ = RelocInfo::kNoPosition;
   thread_local_.last_fp_ = 0;
 }
 
@@ -1220,7 +1216,7 @@
   bool at_js_exit = false;
   RelocIterator it(debug_info->code());
   while (!it.done()) {
-    if (is_js_return(it.rinfo()->rmode())) {
+    if (RelocInfo::IsJSReturn(it.rinfo()->rmode())) {
       at_js_exit = it.rinfo()->pc() == addr - 1;
     }
     it.next();
diff --git a/src/debug.h b/src/debug.h
index 23b0ff2..52e8a98 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -97,11 +97,13 @@
   inline Address pc() { return reloc_iterator_->rinfo()->pc(); }
   inline Code* code() { return debug_info_->code(); }
   inline RelocInfo* rinfo() { return reloc_iterator_->rinfo(); }
-  inline RelocMode rmode() const { return reloc_iterator_->rinfo()->rmode(); }
+  inline RelocInfo::Mode rmode() const {
+    return reloc_iterator_->rinfo()->rmode();
+  }
   inline RelocInfo* original_rinfo() {
     return reloc_iterator_original_->rinfo();
   }
-  inline RelocMode original_rmode() const {
+  inline RelocInfo::Mode original_rmode() const {
     return reloc_iterator_original_->rinfo()->rmode();
   }
 
diff --git a/src/disasm-arm.cc b/src/disasm-arm.cc
index 88f6842..4aa9b78 100644
--- a/src/disasm-arm.cc
+++ b/src/disasm-arm.cc
@@ -855,13 +855,9 @@
 
 
 const char* NameConverter::NameOfAddress(byte* addr) const {
-  static char tmp_buffer[32];
-#ifdef WIN32
-  _snprintf(tmp_buffer, sizeof tmp_buffer, "%p", addr);
-#else
-  snprintf(tmp_buffer, sizeof tmp_buffer, "%p", addr);
-#endif
-  return tmp_buffer;
+  static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
+  v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
+  return tmp_buffer.start();
 }
 
 
diff --git a/src/disassembler.cc b/src/disassembler.cc
index 5d3412a..12183d5 100644
--- a/src/disassembler.cc
+++ b/src/disassembler.cc
@@ -139,6 +139,15 @@
                      *reinterpret_cast<int32_t*>(pc));
         constants = num_const;
         pc += 4;
+      } else if (it != NULL && !it->done() && it->rinfo()->pc() == pc &&
+          it->rinfo()->rmode() == RelocInfo::INTERNAL_REFERENCE) {
+        // raw pointer embedded in code stream, e.g., jump table
+        byte* ptr = *reinterpret_cast<byte**>(pc);
+        OS::SNPrintF(decode_buffer,
+                     "%08x      jump table entry %4d",
+                     reinterpret_cast<int32_t>(ptr),
+                     ptr - begin);
+        pc += 4;
       } else {
         decode_buffer[0] = '\0';
         pc += d.InstructionDecode(decode_buffer, pc);
@@ -148,11 +157,11 @@
     // Collect RelocInfo for this instruction (prev_pc .. pc-1)
     List<const char*> comments(4);
     List<byte*> pcs(1);
-    List<RelocMode> rmodes(1);
+    List<RelocInfo::Mode> rmodes(1);
     List<intptr_t> datas(1);
     if (it != NULL) {
       while (!it->done() && it->rinfo()->pc() < pc) {
-        if (is_comment(it->rinfo()->rmode())) {
+        if (RelocInfo::IsComment(it->rinfo()->rmode())) {
           // For comments just collect the text.
           comments.Add(reinterpret_cast<const char*>(it->rinfo()->data()));
         } else {
@@ -197,32 +206,32 @@
         out.AddPadding(' ', kRelocInfoPosition);
       }
 
-      RelocMode rmode = relocinfo.rmode();
-      if (is_position(rmode)) {
-        if (is_statement_position(rmode)) {
+      RelocInfo::Mode rmode = relocinfo.rmode();
+      if (RelocInfo::IsPosition(rmode)) {
+        if (RelocInfo::IsStatementPosition(rmode)) {
           out.AddFormatted("    ;; debug: statement %d", relocinfo.data());
         } else {
           out.AddFormatted("    ;; debug: position %d", relocinfo.data());
         }
-      } else if (rmode == embedded_object) {
+      } else if (rmode == RelocInfo::EMBEDDED_OBJECT) {
         HeapStringAllocator allocator;
         StringStream accumulator(&allocator);
         relocinfo.target_object()->ShortPrint(&accumulator);
         SmartPointer<char> obj_name = accumulator.ToCString();
         out.AddFormatted("    ;; object: %s", *obj_name);
-      } else if (rmode == external_reference) {
+      } else if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
         const char* reference_name =
             ref_encoder.NameOfAddress(*relocinfo.target_reference_address());
         out.AddFormatted("    ;; external reference (%s)", reference_name);
-      } else if (is_code_target(rmode)) {
+      } else if (RelocInfo::IsCodeTarget(rmode)) {
         out.AddFormatted("    ;; code:");
-        if (rmode == js_construct_call) {
+        if (rmode == RelocInfo::CONSTRUCT_CALL) {
           out.AddFormatted(" constructor,");
         }
         Code* code = Debug::GetCodeTarget(relocinfo.target_address());
         Code::Kind kind = code->kind();
         if (code->is_inline_cache_stub()) {
-          if (rmode == code_target_context) {
+          if (rmode == RelocInfo::CODE_TARGET_CONTEXT) {
             out.AddFormatted(" contextual,");
           }
           InlineCacheState ic_state = code->ic_state();
diff --git a/src/factory.cc b/src/factory.cc
index 444e7e2..3167a71 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -211,12 +211,8 @@
     // Store the object, regexp and array functions in the literals
     // array prefix.  These functions will be used when creating
     // object, regexp and array literals in this function.
-    literals->set(JSFunction::kLiteralObjectFunctionIndex,
-                  context->global_context()->object_function());
-    literals->set(JSFunction::kLiteralRegExpFunctionIndex,
-                  context->global_context()->regexp_function());
-    literals->set(JSFunction::kLiteralArrayFunctionIndex,
-                  context->global_context()->array_function());
+    literals->set(JSFunction::kLiteralGlobalContextIndex,
+                  context->global_context());
   }
   result->set_literals(*literals);
   ASSERT(!result->IsBoilerplate());
@@ -558,6 +554,12 @@
 }
 
 
+Handle<JSObject> Factory::NewJSObjectFromMap(Handle<Map> map) {
+  CALL_HEAP_FUNCTION(Heap::AllocateJSObjectFromMap(*map, NOT_TENURED),
+                     JSObject);
+}
+
+
 Handle<JSObject> Factory::NewObjectLiteral(int expected_number_of_properties) {
   Handle<Map> map = Handle<Map>(Top::object_function()->initial_map());
   map = Factory::CopyMap(map);
@@ -749,6 +751,47 @@
 }
 
 
+Handle<MapCache> Factory::NewMapCache(int at_least_space_for) {
+  CALL_HEAP_FUNCTION(MapCache::Allocate(at_least_space_for), MapCache);
+}
+
+
+static Object* UpdateMapCacheWith(Context* context,
+                                  FixedArray* keys,
+                                  Map* map) {
+  Object* result = MapCache::cast(context->map_cache())->Put(keys, map);
+  if (!result->IsFailure()) context->set_map_cache(MapCache::cast(result));
+  return result;
+}
+
+
+Handle<MapCache> Factory::AddToMapCache(Handle<Context> context,
+                                        Handle<FixedArray> keys,
+                                        Handle<Map> map) {
+  CALL_HEAP_FUNCTION(UpdateMapCacheWith(*context, *keys, *map), MapCache);
+}
+
+
+Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<Context> context,
+                                               Handle<FixedArray> keys) {
+  if (context->map_cache()->IsUndefined()) {
+    // Allocate the new map cache for the global context.
+    Handle<MapCache> new_cache = NewMapCache(24);
+    context->set_map_cache(*new_cache);
+  }
+  // Check to see whether there is a matching element in the cache.
+  Handle<MapCache> cache =
+      Handle<MapCache>(MapCache::cast(context->map_cache()));
+  Handle<Object> result = Handle<Object>(cache->Lookup(*keys));
+  if (result->IsMap()) return Handle<Map>::cast(result);
+  // Create a new map and add it to the cache.
+  Handle<Map> map =
+      CopyMap(Handle<Map>(context->object_function()->initial_map()));
+  AddToMapCache(context, keys, map);
+  return Handle<Map>(map);
+}
+
+
 void Factory::ConfigureInstance(Handle<FunctionTemplateInfo> desc,
                                 Handle<JSObject> instance,
                                 bool* pending_exception) {
diff --git a/src/factory.h b/src/factory.h
index 9efbcd5..0d854c0 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -167,6 +167,10 @@
   static Handle<JSObject> NewJSObject(Handle<JSFunction> constructor,
                                       PretenureFlag pretenure = NOT_TENURED);
 
+  // JS objects are pretenured when allocated by the bootstrapper and
+  // runtime.
+  static Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
+
   // Allocate a JS object representing an object literal.  The object is
   // pretenured (allocated directly in the old generation).
   static Handle<JSObject> NewObjectLiteral(int expected_number_of_properties);
@@ -291,6 +295,12 @@
 
   static Handle<DebugInfo> NewDebugInfo(Handle<SharedFunctionInfo> shared);
 
+
+  // Return a map using the map cache in the global context.
+  // The key is an ordered set of property names.
+  static Handle<Map> ObjectLiteralMapFromCache(Handle<Context> context,
+                                               Handle<FixedArray> keys);
+
  private:
   static Handle<JSFunction> NewFunctionHelper(Handle<String> name,
                                               Handle<Object> prototype);
@@ -302,6 +312,14 @@
   static Handle<JSFunction> BaseNewFunctionFromBoilerplate(
       Handle<JSFunction> boilerplate,
       Handle<Map> function_map);
+
+  // Create a new map cache.
+  static Handle<MapCache> NewMapCache(int at_least_space_for);
+
+  // Update the map cache in the global context with (keys, map)
+  static Handle<MapCache> AddToMapCache(Handle<Context> context,
+                                        Handle<FixedArray> keys,
+                                        Handle<Map> map);
 };
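
The map cache added above is what backs the changelog item on canonicalizing object literal maps: the global context gains a MAP_CACHE_INDEX slot, and ObjectLiteralMapFromCache looks a map up by the ordered set of property names, copying the object function's initial map only on a miss. A rough sketch of that lookup/insert flow using plain C++ containers (illustration only; the real MapCache is a heap-allocated hash table, and LiteralMap here is just a stand-in for Map):

#include <map>
#include <string>
#include <vector>

struct LiteralMap { int id; };  // stand-in for v8::internal::Map

// Key: the ordered property names of a literal. Value: the shared map.
typedef std::map<std::vector<std::string>, LiteralMap*> LiteralMapCache;

LiteralMap* ObjectLiteralMapFromCacheSketch(
    LiteralMapCache* cache, const std::vector<std::string>& keys) {
  LiteralMapCache::iterator it = cache->find(keys);
  if (it != cache->end()) return it->second;  // hit: literals share this map
  LiteralMap* map = new LiteralMap();         // miss: create a fresh map...
  map->id = static_cast<int>(cache->size());
  (*cache)[keys] = map;                       // ...and remember it for next time
  return map;                                 // (ownership ignored in this sketch)
}

// Two literals {a: 1, b: 2} and {a: 3, b: 4} present the same key list
// {"a", "b"}, get the same LiteralMap back, and therefore hit the same
// inline caches.
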
 
 
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index ac0aeb6..52c2847 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -154,6 +154,9 @@
 DEFINE_bool(cleanup_caches_in_maps_at_gc, true,
             "Flush code caches in maps during mark compact cycle.")
 
+DEFINE_bool(canonicalize_object_literal_maps, true,
+            "Canonicalize maps for object literals.")
+
 // mksnapshot.cc
 DEFINE_bool(h, false, "print this message")
 
diff --git a/src/frames-ia32.h b/src/frames-ia32.h
index a39e5b9..e31906d 100644
--- a/src/frames-ia32.h
+++ b/src/frames-ia32.h
@@ -81,8 +81,8 @@
 
 class ExitFrameConstants : public AllStatic {
  public:
-  static const int kDebugMarkOffset = -3 * kPointerSize;
-  static const int kSPOffset        = -2 * kPointerSize;
+  static const int kDebugMarkOffset = -2 * kPointerSize;
+  static const int kSPOffset        = -1 * kPointerSize;
 
   // Let the parameters pointer for exit frames point just below the
   // frame structure on the stack (frame pointer and return address).
diff --git a/src/globals.h b/src/globals.h
index f2c9ac4..2a5cf34 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -303,10 +303,11 @@
   CONSTANT_FUNCTION   = 2,  // only in fast mode
   CALLBACKS           = 3,
   INTERCEPTOR         = 4,  // only in lookup results, not in descriptors.
-  FIRST_PHANTOM_PROPERTY_TYPE = 5,  // All properties before this are real.
   MAP_TRANSITION      = 5,  // only in fast mode
   CONSTANT_TRANSITION = 6,  // only in fast mode
-  NULL_DESCRIPTOR     = 7   // only in fast mode
+  NULL_DESCRIPTOR     = 7,  // only in fast mode
+  // All properties before MAP_TRANSITION are real.
+  FIRST_PHANTOM_PROPERTY_TYPE = MAP_TRANSITION
 };
 
 
diff --git a/src/handles.cc b/src/handles.cc
index 1fbb8ae..8c1b957 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -332,13 +332,6 @@
 Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object) {
   Handle<FixedArray> content = Factory::empty_fixed_array();
 
-  // Check access rights if required.
-  if (object->IsAccessCheckNeeded() &&
-    !Top::MayNamedAccess(*object, Heap::undefined_value(), v8::ACCESS_KEYS)) {
-    Top::ReportFailedAccessCheck(*object, v8::ACCESS_KEYS);
-    return content;
-  }
-
   JSObject* arguments_boilerplate =
       Top::context()->global_context()->arguments_boilerplate();
   JSFunction* arguments_function =
@@ -352,6 +345,14 @@
          p = Handle<Object>(p->GetPrototype())) {
       Handle<JSObject> current(JSObject::cast(*p));
 
+      // Check access rights if required.
+      if (current->IsAccessCheckNeeded() &&
+        !Top::MayNamedAccess(*current, Heap::undefined_value(),
+                             v8::ACCESS_KEYS)) {
+        Top::ReportFailedAccessCheck(*current, v8::ACCESS_KEYS);
+        break;
+      }
+
       // Compute the property keys.
       content = UnionOfKeys(content, GetEnumPropertyKeys(current));
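
A rough standalone sketch of the revised GetKeysInFixedArrayFor behavior, using
a hypothetical Obj type instead of V8's handles: the access check now runs for
every object on the prototype chain, and collection stops at the first object
that denies access instead of bailing out before gathering anything.

    #include <set>
    #include <string>
    #include <vector>

    struct Obj {
      std::vector<std::string> own_keys;
      bool access_allowed = true;
      const Obj* prototype = nullptr;
    };

    std::set<std::string> CollectKeys(const Obj* obj) {
      std::set<std::string> keys;
      for (const Obj* current = obj; current != nullptr;
           current = current->prototype) {
        if (!current->access_allowed) break;  // stop at the first failing link
        keys.insert(current->own_keys.begin(), current->own_keys.end());
      }
      return keys;
    }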
 
diff --git a/src/heap.cc b/src/heap.cc
index b2aaf32..ad2c038 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -974,15 +974,15 @@
   STRUCT_LIST(ALLOCATE_STRUCT_MAP)
 #undef ALLOCATE_STRUCT_MAP
 
-  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kSize);
+  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
   hash_table_map_ = Map::cast(obj);
 
-  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kSize);
+  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
   context_map_ = Map::cast(obj);
 
-  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kSize);
+  obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
   if (obj->IsFailure()) return false;
   global_context_map_ = Map::cast(obj);
 
diff --git a/src/ic-arm.cc b/src/ic-arm.cc
index 6977e17..fee95e0 100644
--- a/src/ic-arm.cc
+++ b/src/ic-arm.cc
@@ -151,7 +151,7 @@
   // Cache miss: Jump to runtime.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 }
 
 
@@ -187,7 +187,7 @@
   // Cache miss: Jump to runtime.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 }
 
 
@@ -222,7 +222,7 @@
   // Cache miss: Jump to runtime.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 }
 
 
@@ -256,7 +256,7 @@
   // Cache miss: Jump to runtime.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 }
 
 
@@ -271,7 +271,7 @@
   // sub-optimal. We should port the fast case code from IA-32.
 
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 }
 
 
@@ -427,7 +427,7 @@
   // Move result to r1.
   __ mov(r1, Operand(r0));
 
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // Patch the function on the stack; 1 ~ receiver.
   __ str(r1, MemOperand(sp, (argc + 1) * kPointerSize));
diff --git a/src/ic-ia32.cc b/src/ic-ia32.cc
index 37c35ee..b9f77d5 100644
--- a/src/ic-ia32.cc
+++ b/src/ic-ia32.cc
@@ -528,7 +528,7 @@
 
   // Move result to edi and exit the internal frame.
   __ mov(Operand(edi), eax);
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // Invoke the function.
   ParameterCount actual(argc);
diff --git a/src/ic.cc b/src/ic.cc
index 59794ea..8cfca1e 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -164,11 +164,9 @@
   // builtins are loaded lazily.  It is important to keep inline
   // caches for the builtins object monomorphic.  Therefore, if we get
   // an inline cache miss for the builtins object after lazily loading
-  // JavaScript builtins, we clear the code cache and return
-  // uninitialized as the state to force the inline cache back to
-  // monomorphic state.
+  // JavaScript builtins, we return uninitialized as the state to
+  // force the inline cache back to monomorphic state.
   if (receiver->IsJSBuiltinsObject()) {
-    map->ClearCodeCache();
     return UNINITIALIZED;
   }
 
@@ -176,7 +174,7 @@
 }
 
 
-RelocMode IC::ComputeMode() {
+RelocInfo::Mode IC::ComputeMode() {
   Address addr = address();
   Code* code = Code::cast(Heap::FindCodeObject(addr));
   for (RelocIterator it(code, RelocInfo::kCodeTargetMask);
@@ -185,7 +183,7 @@
     if (info->pc() == addr) return info->rmode();
   }
   UNREACHABLE();
-  return no_reloc;
+  return RelocInfo::NONE;
 }
 
 
diff --git a/src/ic.h b/src/ic.h
index fa45c7b..6534da0 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -90,11 +90,13 @@
   // Computes the reloc info for this IC. This is a fairly expensive
   // operation as it has to search through the heap to find the code
   // object that contains this IC site.
-  RelocMode ComputeMode();
+  RelocInfo::Mode ComputeMode();
 
   // Returns if this IC is for contextual (no explicit receiver)
   // access to properties.
-  bool is_contextual() { return ComputeMode() == code_target_context; }
+  bool is_contextual() {
+    return ComputeMode() == RelocInfo::CODE_TARGET_CONTEXT;
+  }
 
   // Returns the map to use for caching stubs for a given object.
   // This method should not be called with undefined or null.
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index b59a3af..851700b 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -142,7 +142,7 @@
 }
 
 
-Handle<Object> RegExpImpl::JsreCompile(Handle<JSValue> re,
+Handle<Object> RegExpImpl::JsreCompile(Handle<JSRegExp> re,
                                        Handle<String> pattern,
                                        Handle<String> flags) {
   JSRegExpIgnoreCaseOption case_option = JSRegExpDoNotIgnoreCase;
@@ -159,10 +159,10 @@
   const char* error_message = NULL;
 
   malloc_failure = Failure::Exception();
-  JSRegExp* code = jsRegExpCompile(two_byte_pattern->GetTwoByteData(),
-                                   pattern->length(), case_option,
-                                   multiline_option, &number_of_captures,
-                                   &error_message, &JSREMalloc, &JSREFree);
+  JscreRegExp* code = jsRegExpCompile(two_byte_pattern->GetTwoByteData(),
+                                      pattern->length(), case_option,
+                                      multiline_option, &number_of_captures,
+                                      &error_message, &JSREMalloc, &JSREFree);
 
   if (code == NULL && malloc_failure->IsRetryAfterGC()) {
     // Performs a GC, then retries.
@@ -203,7 +203,8 @@
   Handle<FixedArray> value = Factory::NewFixedArray(2);
   value->set(CAPTURE_INDEX, Smi::FromInt(number_of_captures));
   value->set(INTERNAL_INDEX, *internal);
-  re->set_value(*value);
+  re->set_type_tag(JSRegExp::JSCRE);
+  re->set_data(*value);
 
   LOG(RegExpCompileEvent(re));
 
@@ -211,7 +212,7 @@
 }
 
 
-Handle<Object> RegExpImpl::JsreExecOnce(Handle<JSValue> regexp,
+Handle<Object> RegExpImpl::JsreExecOnce(Handle<JSRegExp> regexp,
                                         int num_captures,
                                         Handle<String> subject,
                                         int previous_index,
@@ -222,16 +223,17 @@
   {
     AssertNoAllocation a;
     ByteArray* internal = JsreInternal(regexp);
-    const JSRegExp* js_regexp =
-        reinterpret_cast<JSRegExp*>(internal->GetDataStartAddress());
+    const JscreRegExp* js_regexp =
+        reinterpret_cast<JscreRegExp*>(internal->GetDataStartAddress());
 
     LOG(RegExpExecEvent(regexp, previous_index, subject));
 
-    rc = jsRegExpExecute(js_regexp, two_byte_subject,
-                       subject->length(),
-                       previous_index,
-                       offsets_vector,
-                       offsets_vector_length);
+    rc = jsRegExpExecute(js_regexp,
+                         two_byte_subject,
+                         subject->length(),
+                         previous_index,
+                         offsets_vector,
+                         offsets_vector_length);
   }
 
   // The KJS JavaScript engine returns null (ie, a failed match) when
@@ -304,7 +306,7 @@
     OffsetsVector::kStaticOffsetsVectorSize];
 
 
-Handle<Object> RegExpImpl::JsreExec(Handle<JSValue> regexp,
+Handle<Object> RegExpImpl::JsreExec(Handle<JSRegExp> regexp,
                                     Handle<String> subject,
                                     Handle<Object> index) {
   // Prepare space for the return values.
@@ -325,7 +327,7 @@
 }
 
 
-Handle<Object> RegExpImpl::JsreExecGlobal(Handle<JSValue> regexp,
+Handle<Object> RegExpImpl::JsreExecGlobal(Handle<JSRegExp> regexp,
                                           Handle<String> subject) {
   // Prepare space for the return values.
   int num_captures = JsreCapture(regexp);
@@ -370,15 +372,15 @@
 }
 
 
-int RegExpImpl::JsreCapture(Handle<JSValue> re) {
-  Object* value = re->value();
+int RegExpImpl::JsreCapture(Handle<JSRegExp> re) {
+  Object* value = re->data();
   ASSERT(value->IsFixedArray());
   return Smi::cast(FixedArray::cast(value)->get(CAPTURE_INDEX))->value();
 }
 
 
-ByteArray* RegExpImpl::JsreInternal(Handle<JSValue> re) {
-  Object* value = re->value();
+ByteArray* RegExpImpl::JsreInternal(Handle<JSRegExp> re) {
+  Object* value = re->data();
   ASSERT(value->IsFixedArray());
   return ByteArray::cast(FixedArray::cast(value)->get(INTERNAL_INDEX));
 }
diff --git a/src/jsregexp.h b/src/jsregexp.h
index 32835bf..9fc15bc 100644
--- a/src/jsregexp.h
+++ b/src/jsregexp.h
@@ -44,21 +44,21 @@
   // This function calls the garbage collector if necessary.
   static Handle<String> ToString(Handle<Object> value);
 
-  static Handle<Object> JsreCompile(Handle<JSValue> re,
+  static Handle<Object> JsreCompile(Handle<JSRegExp> re,
                                     Handle<String> pattern,
                                     Handle<String> flags);
 
   // Implements RegExp.prototype.exec(string) function.
   // See ECMA-262 section 15.10.6.2.
   // This function calls the garbage collector if necessary.
-  static Handle<Object> JsreExec(Handle<JSValue> regexp,
+  static Handle<Object> JsreExec(Handle<JSRegExp> regexp,
                                  Handle<String> subject,
                                  Handle<Object> index);
 
   // Call RegExp.prototype.exec(string) in a loop.
   // Used by String.prototype.match and String.prototype.replace.
   // This function calls the garbage collector if necessary.
-  static Handle<Object> JsreExecGlobal(Handle<JSValue> regexp,
+  static Handle<Object> JsreExecGlobal(Handle<JSRegExp> regexp,
                                        Handle<String> subject);
 
   static void NewSpaceCollectionPrologue();
@@ -75,11 +75,11 @@
   static String* two_byte_cached_string_;
 
   // Returns the number of captures from the re.
-  static int JsreCapture(Handle<JSValue> re);
-  static ByteArray* JsreInternal(Handle<JSValue> re);
+  static int JsreCapture(Handle<JSRegExp> re);
+  static ByteArray* JsreInternal(Handle<JSRegExp> re);
 
   // Call jsRegExpExecute once
-  static Handle<Object> JsreExecOnce(Handle<JSValue> regexp,
+  static Handle<Object> JsreExecOnce(Handle<JSRegExp> regexp,
                                      int num_captures,
                                      Handle<String> subject,
                                      int previous_index,
diff --git a/src/log.cc b/src/log.cc
index a8159a0..80b5454 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -348,7 +348,7 @@
 
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
-void Logger::LogRegExpSource(Handle<JSValue> regexp) {
+void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
   // Prints "/" + re.source + "/" +
   //      (re.global?"g":"") + (re.ignorecase?"i":"") + (re.multiline?"m":"")
 
@@ -392,7 +392,7 @@
 #endif  // ENABLE_LOGGING_AND_PROFILING
 
 
-void Logger::RegExpCompileEvent(Handle<JSValue> regexp) {
+void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
   if (logfile_ == NULL || !FLAG_log_regexp) return;
   ScopedLock sl(mutex_);
@@ -404,7 +404,7 @@
 }
 
 
-void Logger::RegExpExecEvent(Handle<JSValue> regexp,
+void Logger::RegExpExecEvent(Handle<JSRegExp> regexp,
                              int start_index,
                              Handle<String> input_string) {
 #ifdef ENABLE_LOGGING_AND_PROFILING
diff --git a/src/log.h b/src/log.h
index 1e67fe9..ef77117 100644
--- a/src/log.h
+++ b/src/log.h
@@ -181,9 +181,9 @@
   // ==== Events logged by --log-regexp ====
   // Regexp compilation and execution events.
 
-  static void RegExpCompileEvent(Handle<JSValue> regexp);
+  static void RegExpCompileEvent(Handle<JSRegExp> regexp);
 
-  static void RegExpExecEvent(Handle<JSValue> regexp,
+  static void RegExpExecEvent(Handle<JSRegExp> regexp,
                               int start_index,
                               Handle<String> input_string);
 
@@ -197,7 +197,7 @@
  private:
 
   // Emits the source code of a regexp. Used by regexp events.
-  static void LogRegExpSource(Handle<JSValue> regexp);
+  static void LogRegExpSource(Handle<JSRegExp> regexp);
 
   // Emits a profiler tick event. Used by the profiler thread.
   static void TickEvent(TickSample* sample, bool overflow);
diff --git a/src/macro-assembler-arm.cc b/src/macro-assembler-arm.cc
index d85a1de..5e5b9c9 100644
--- a/src/macro-assembler-arm.cc
+++ b/src/macro-assembler-arm.cc
@@ -84,7 +84,8 @@
 }
 
 
-void MacroAssembler::Jump(intptr_t target, RelocMode rmode, Condition cond) {
+void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
+                          Condition cond) {
 #if USE_BX
   mov(ip, Operand(target, rmode), LeaveCC, cond);
   bx(ip, cond);
@@ -94,14 +95,16 @@
 }
 
 
-void MacroAssembler::Jump(byte* target, RelocMode rmode, Condition cond) {
-  ASSERT(!is_code_target(rmode));
+void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(!RelocInfo::IsCodeTarget(rmode));
   Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
 }
 
 
-void MacroAssembler::Jump(Handle<Code> code, RelocMode rmode, Condition cond) {
-  ASSERT(is_code_target(rmode));
+void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
   Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
 }
@@ -118,9 +121,10 @@
 }
 
 
-void MacroAssembler::Call(intptr_t target, RelocMode rmode, Condition cond) {
+void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
+                          Condition cond) {
 #if !defined(__arm__)
-  if (rmode == runtime_entry) {
+  if (rmode == RelocInfo::RUNTIME_ENTRY) {
     mov(r2, Operand(target, rmode), LeaveCC, cond);
     // Set lr for return at current pc + 8.
     mov(lr, Operand(pc), LeaveCC, cond);
@@ -148,14 +152,16 @@
 }
 
 
-void MacroAssembler::Call(byte* target, RelocMode rmode, Condition cond) {
-  ASSERT(!is_code_target(rmode));
+void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(!RelocInfo::IsCodeTarget(rmode));
   Call(reinterpret_cast<intptr_t>(target), rmode, cond);
 }
 
 
-void MacroAssembler::Call(Handle<Code> code, RelocMode rmode, Condition cond) {
-  ASSERT(is_code_target(rmode));
+void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
   Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
 }
@@ -258,7 +264,7 @@
 }
 
 
-void MacroAssembler::ExitInternalFrame() {
+void MacroAssembler::LeaveInternalFrame() {
   // r0: preserved
   // r1: preserved
   // r2: preserved
@@ -270,6 +276,75 @@
 }
 
 
+void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
+  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);
+  // Compute parameter pointer before making changes and save it as ip
+  // register so that it is restored as sp register on exit, thereby
+  // popping the args.
+
+  // ip = sp + kPointerSize * #args;
+  add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
+
+  // Push in reverse order: caller_fp, sp_on_exit, and caller_pc.
+  stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
+  mov(fp, Operand(sp));  // setup new frame pointer
+
+  // Push debug marker.
+  mov(ip, Operand(type == StackFrame::EXIT_DEBUG ? 1 : 0));
+  push(ip);
+
+  // Save the frame pointer and the context in top.
+  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+  str(fp, MemOperand(ip));
+  mov(ip, Operand(ExternalReference(Top::k_context_address)));
+  str(cp, MemOperand(ip));
+
+  // Setup argc and the builtin function in callee-saved registers.
+  mov(r4, Operand(r0));
+  mov(r5, Operand(r1));
+
+  // Compute the argv pointer and keep it in a callee-saved register.
+  add(r6, fp, Operand(r4, LSL, kPointerSizeLog2));
+  add(r6, r6, Operand(ExitFrameConstants::kPPDisplacement - kPointerSize));
+
+  // Save the state of all registers to the stack from the memory
+  // location. This is needed to allow nested break points.
+  if (type == StackFrame::EXIT_DEBUG) {
+    // Use sp as base to push.
+    CopyRegistersFromMemoryToStack(sp, kJSCallerSaved);
+  }
+}
+
+
+void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
+  // Restore the memory copy of the registers by digging them out from
+  // the stack. This is needed to allow nested break points.
+  if (type == StackFrame::EXIT_DEBUG) {
+    // This code intentionally clobbers r2 and r3.
+    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
+    const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
+    add(r3, fp, Operand(kOffset));
+    CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved);
+  }
+
+  // Clear top frame.
+  mov(r3, Operand(0));
+  mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+  str(r3, MemOperand(ip));
+
+  // Restore current context from top and clear it in debug mode.
+  mov(ip, Operand(ExternalReference(Top::k_context_address)));
+  ldr(cp, MemOperand(ip));
+  if (kDebug) {
+    str(r3, MemOperand(ip));
+  }
+
+  // Pop the arguments, restore registers, and return.
+  mov(sp, Operand(fp));  // respect ABI stack constraint
+  ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
+}
+
+
 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     Handle<Code> code_constant,
@@ -330,10 +405,10 @@
     Handle<Code> adaptor =
         Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
     if (flag == CALL_FUNCTION) {
-      Call(adaptor, code_target);
+      Call(adaptor, RelocInfo::CODE_TARGET);
       b(done);
     } else {
-      Jump(adaptor, code_target);
+      Jump(adaptor, RelocInfo::CODE_TARGET);
     }
     bind(&regular_invoke);
   }
@@ -363,7 +438,7 @@
 void MacroAssembler::InvokeCode(Handle<Code> code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
-                                RelocMode rmode,
+                                RelocInfo::Mode rmode,
                                 InvokeFlag flag) {
   Label done;
 
@@ -603,13 +678,7 @@
 
 void MacroAssembler::CallStub(CodeStub* stub) {
   ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
-  Call(stub->GetCode(), code_target);
-}
-
-
-void MacroAssembler::CallJSExitStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
-  Call(stub->GetCode(), exit_js_frame);
+  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
@@ -658,7 +727,7 @@
 #endif
   mov(r1, Operand(builtin));
   CEntryStub stub;
-  Jump(stub.GetCode(), code_target);
+  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
@@ -681,10 +750,10 @@
   Handle<Code> code = ResolveBuiltin(id, &resolved);
 
   if (flags == CALL_JS) {
-    Call(code, code_target);
+    Call(code, RelocInfo::CODE_TARGET);
   } else {
     ASSERT(flags == JUMP_JS);
-    Jump(code, code_target);
+    Jump(code, RelocInfo::CODE_TARGET);
   }
 
   if (!resolved) {
diff --git a/src/macro-assembler-arm.h b/src/macro-assembler-arm.h
index 0af585d..9c046ac 100644
--- a/src/macro-assembler-arm.h
+++ b/src/macro-assembler-arm.h
@@ -77,15 +77,15 @@
 
   // Jump, Call, and Ret pseudo instructions implementing inter-working
  private:
-  void Jump(intptr_t target, RelocMode rmode, Condition cond = al);
-  void Call(intptr_t target, RelocMode rmode, Condition cond = al);
+  void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
+  void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
  public:
   void Jump(Register target, Condition cond = al);
-  void Jump(byte* target, RelocMode rmode, Condition cond = al);
-  void Jump(Handle<Code> code, RelocMode rmode, Condition cond = al);
+  void Jump(byte* target, RelocInfo::Mode rmode, Condition cond = al);
+  void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
   void Call(Register target, Condition cond = al);
-  void Call(byte* target, RelocMode rmode, Condition cond = al);
-  void Call(Handle<Code> code, RelocMode rmode, Condition cond = al);
+  void Call(byte* target, RelocInfo::Mode rmode, Condition cond = al);
+  void Call(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
   void Ret();
 
   // Sets the remembered set bit for [address+offset], where address is the
@@ -99,7 +99,16 @@
   // Activation frames
 
   void EnterInternalFrame();
-  void ExitInternalFrame();
+  void LeaveInternalFrame();
+
+  // Enter specific kind of exit frame; either EXIT or
+  // EXIT_DEBUG. Expects the number of arguments in register r0 and
+  // the builtin function to call in register r1. Exits with argc in
+  // r4, argv in r6, and the builtin function to call in r5.
+  void EnterExitFrame(StackFrame::Type type);
+
+  // Leave the current exit frame. Expects the return value in r0.
+  void LeaveExitFrame(StackFrame::Type type);
 
 
   // ---------------------------------------------------------------------------
@@ -114,7 +123,7 @@
   void InvokeCode(Handle<Code> code,
                   const ParameterCount& expected,
                   const ParameterCount& actual,
-                  RelocMode rmode,
+                  RelocInfo::Mode rmode,
                   InvokeFlag flag);
 
   // Invoke the JavaScript function in the given register. Changes the
diff --git a/src/macro-assembler-ia32.cc b/src/macro-assembler-ia32.cc
index b59735a..dab6778 100644
--- a/src/macro-assembler-ia32.cc
+++ b/src/macro-assembler-ia32.cc
@@ -325,7 +325,7 @@
 }
 
 
-void MacroAssembler::ExitInternalFrame() {
+void MacroAssembler::LeaveInternalFrame() {
   if (FLAG_debug_code) {
     StackFrame::Type type = StackFrame::INTERNAL;
     cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
@@ -336,6 +336,94 @@
 }
 
 
+void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
+  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);
+
+  // Setup the frame structure on the stack.
+  ASSERT(ExitFrameConstants::kPPDisplacement == +2 * kPointerSize);
+  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
+  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
+  push(ebp);
+  mov(ebp, Operand(esp));
+
+  // Reserve room for entry stack pointer and push the debug marker.
+  ASSERT(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
+  push(Immediate(0));  // saved entry sp, patched before call
+  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));
+
+  // Save the frame pointer and the context in top.
+  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
+  ExternalReference context_address(Top::k_context_address);
+  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
+  mov(Operand::StaticVariable(context_address), esi);
+
+  // Setup argc and argv in callee-saved registers.
+  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
+  mov(edi, Operand(eax));
+  lea(esi, Operand(ebp, eax, times_4, offset));
+
+  // Save the state of all registers to the stack from the memory
+  // location. This is needed to allow nested break points.
+  if (type == StackFrame::EXIT_DEBUG) {
+    // TODO(1243899): This should be symmetric to
+    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
+    // correct here, but computed for the other call. Very error
+    // prone! FIX THIS.  Actually there are deeper problems with
+    // register saving than this asymmetry (see the bug report
+    // associated with this issue).
+    PushRegistersFromMemory(kJSCallerSaved);
+  }
+
+  // Reserve space for two arguments: argc and argv.
+  sub(Operand(esp), Immediate(2 * kPointerSize));
+
+  // Get the required frame alignment for the OS.
+  static const int kFrameAlignment = OS::ActivationFrameAlignment();
+  if (kFrameAlignment > 0) {
+    ASSERT(IsPowerOf2(kFrameAlignment));
+    and_(esp, -kFrameAlignment);
+  }
+
+  // Patch the saved entry sp.
+  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
+}
+
+
+void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
+  // Restore the memory copy of the registers by digging them out from
+  // the stack. This is needed to allow nested break points.
+  if (type == StackFrame::EXIT_DEBUG) {
+    // It's okay to clobber register ebx below because we don't need
+    // the function pointer after this.
+    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
+    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
+    lea(ebx, Operand(ebp, kOffset));
+    CopyRegistersFromStackToMemory(ebx, ecx, kJSCallerSaved);
+  }
+
+  // Get the return address from the stack and restore the frame pointer.
+  mov(ecx, Operand(ebp, 1 * kPointerSize));
+  mov(ebp, Operand(ebp, 0 * kPointerSize));
+
+  // Pop the arguments and the receiver from the caller stack.
+  lea(esp, Operand(esi, 1 * kPointerSize));
+
+  // Restore current context from top and clear it in debug mode.
+  ExternalReference context_address(Top::k_context_address);
+  mov(esi, Operand::StaticVariable(context_address));
+  if (kDebug) {
+    mov(Operand::StaticVariable(context_address), Immediate(0));
+  }
+
+  // Push the return address to get ready to return.
+  push(ecx);
+
+  // Clear the top frame.
+  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
+  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
+}
+
+
 void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                     HandlerType type) {
   ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize);  // adjust this code
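
The `and_(esp, -kFrameAlignment)` in EnterExitFrame above relies on the usual
power-of-two alignment trick. A small standalone illustration, with a
hypothetical AlignDown helper, of rounding a stack pointer down to an
alignment boundary with a single bitwise AND:

    #include <cassert>
    #include <cstdint>

    std::uintptr_t AlignDown(std::uintptr_t sp, std::uintptr_t alignment) {
      // Requires a power-of-two alignment, as EnterExitFrame asserts.
      assert(alignment != 0 && (alignment & (alignment - 1)) == 0);
      // ~(alignment - 1) equals -alignment in two's complement.
      return sp & ~(alignment - 1);
    }

    int main() {
      assert(AlignDown(0x1003, 16) == 0x1000);
      assert(AlignDown(0x1000, 16) == 0x1000);
      return 0;
    }
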
@@ -503,7 +591,7 @@
 
 void MacroAssembler::CallStub(CodeStub* stub) {
   ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
-  call(stub->GetCode(), code_target);
+  call(stub->GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
@@ -554,7 +642,7 @@
   // Set the entry point and jump to the C entry runtime stub.
   mov(Operand(ebx), Immediate(ext));
   CEntryStub ces;
-  jmp(ces.GetCode(), code_target);
+  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
 }
 
 
@@ -613,10 +701,10 @@
     }
 
     if (flag == CALL_FUNCTION) {
-      call(adaptor, code_target);
+      call(adaptor, RelocInfo::CODE_TARGET);
       jmp(done);
     } else {
-      jmp(adaptor, code_target);
+      jmp(adaptor, RelocInfo::CODE_TARGET);
     }
     bind(&invoke);
   }
@@ -642,7 +730,7 @@
 void MacroAssembler::InvokeCode(Handle<Code> code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
-                                RelocMode rmode,
+                                RelocInfo::Mode rmode,
                                 InvokeFlag flag) {
   Label done;
   Operand dummy(eax);
@@ -683,7 +771,8 @@
   // arguments match the expected number of arguments. Fake a
   // parameter count to avoid emitting code to do the check.
   ParameterCount expected(0);
-  InvokeCode(Handle<Code>(code), expected, expected, code_target, flag);
+  InvokeCode(Handle<Code>(code), expected, expected,
+             RelocInfo::CODE_TARGET, flag);
 
   const char* name = Builtins::GetName(id);
   int argc = Builtins::GetArgumentsCount(id);
diff --git a/src/macro-assembler-ia32.h b/src/macro-assembler-ia32.h
index b3189c0..9a923e4 100644
--- a/src/macro-assembler-ia32.h
+++ b/src/macro-assembler-ia32.h
@@ -86,10 +86,19 @@
   // ---------------------------------------------------------------------------
   // Activation frames
 
-  // Enter or exit a stack frame of the given type. Cannot be used to
-  // construct or leave JavaScript frames.
   void EnterInternalFrame();
-  void ExitInternalFrame();
+  void LeaveInternalFrame();
+
+  // Enter specific kind of exit frame; either EXIT or
+  // EXIT_DEBUG. Expects the number of arguments in register eax and
+  // sets up the number of arguments in register edi and the pointer
+  // to the first argument in register esi.
+  void EnterExitFrame(StackFrame::Type type);
+
+  // Leave the current exit frame. Expects the return value in
+  // register eax:edx (untouched) and the pointer to the first
+  // argument in register esi.
+  void LeaveExitFrame(StackFrame::Type type);
 
 
   // ---------------------------------------------------------------------------
@@ -104,7 +113,7 @@
   void InvokeCode(Handle<Code> code,
                   const ParameterCount& expected,
                   const ParameterCount& actual,
-                  RelocMode rmode,
+                  RelocInfo::Mode rmode,
                   InvokeFlag flag);
 
   // Invoke the JavaScript function in the given register. Changes the
@@ -198,6 +207,10 @@
   // Jump to the builtin routine.
   void JumpToBuiltin(const ExternalReference& ext);
 
+
+  // ---------------------------------------------------------------------------
+  // Utilities
+
   void Ret();
 
   struct Unresolved {
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 9ab6ae6..e1937b8 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -277,7 +277,7 @@
   }
 
   void VisitCodeTarget(RelocInfo* rinfo) {
-    ASSERT(is_code_target(rinfo->rmode()));
+    ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
     Code* code = CodeFromDerivedPointer(rinfo->target_address());
     if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
       IC::Clear(rinfo->pc());
@@ -294,7 +294,8 @@
   }
 
   void VisitDebugTarget(RelocInfo* rinfo) {
-    ASSERT(is_js_return(rinfo->rmode()) && rinfo->is_call_instruction());
+    ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
+           rinfo->is_call_instruction());
     HeapObject* code = CodeFromDerivedPointer(rinfo->call_address());
     MarkCompactCollector::MarkObject(code);
     // When compacting we convert the call to a real object pointer.
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index dd656e0..d280153 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -118,6 +118,7 @@
       break;
     case JS_OBJECT_TYPE:  // fall through
     case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:
       JSObject::cast(this)->JSObjectPrint();
       break;
     case ODDBALL_TYPE:
@@ -206,6 +207,9 @@
     case JS_ARRAY_TYPE:
       JSArray::cast(this)->JSArrayVerify();
       break;
+    case JS_REGEXP_TYPE:
+      JSRegExp::cast(this)->JSRegExpVerify();
+      break;
     case FILLER_TYPE:
       break;
     case PROXY_TYPE:
@@ -375,6 +379,7 @@
     case JS_FUNCTION_TYPE: return "JS_FUNCTION";
     case CODE_TYPE: return "CODE";
     case JS_ARRAY_TYPE: return "JS_ARRAY";
+    case JS_REGEXP_TYPE: return "JS_REGEXP";
     case JS_VALUE_TYPE: return "JS_VALUE";
     case JS_GLOBAL_OBJECT_TYPE: return "JS_GLOBAL_OBJECT";
     case JS_BUILTINS_OBJECT_TYPE: return "JS_BUILTINS_OBJECT";
@@ -607,7 +612,7 @@
   for (RelocIterator it(this); !it.done(); it.next()) {
     it.rinfo()->Verify();
     // Ensure that GC will not iterate twice over the same pointer.
-    if (is_gc_reloc_mode(it.rinfo()->rmode())) {
+    if (RelocInfo::IsGCRelocMode(it.rinfo()->rmode())) {
       CHECK(it.rinfo()->pc() != last_gc_pc);
       last_gc_pc = it.rinfo()->pc();
     }
@@ -622,6 +627,13 @@
 }
 
 
+void JSRegExp::JSRegExpVerify() {
+  JSObjectVerify();
+  ASSERT(type()->IsSmi() || type()->IsUndefined());
+  ASSERT(data()->IsUndefined() || data()->IsFixedArray());
+}
+
+
 void Proxy::ProxyPrint() {
   PrintF("proxy to %p", proxy());
 }
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 5bcf0a7..2645363 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -293,6 +293,12 @@
 }
 
 
+bool Object::IsJSRegExp() {
+  return Object::IsHeapObject()
+    && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
+}
+
+
 template <> inline bool Is<JSArray>(Object* obj) {
   return obj->IsJSArray();
 }
@@ -319,6 +325,11 @@
 }
 
 
+bool Object::IsMapCache() {
+  return IsHashTable();
+}
+
+
 bool Object::IsPrimitive() {
   return IsOddball() || IsNumber() || IsString();
 }
@@ -487,7 +498,7 @@
 
 
 Object* HeapObject::GetHeapObjectField(HeapObject* obj, int index) {
-  return READ_FIELD(obj, HeapObject::kSize + kPointerSize * index);
+  return READ_FIELD(obj, HeapObject::kHeaderSize + kPointerSize * index);
 }
 
 
@@ -756,7 +767,9 @@
   ASSERT(map() == from->map());
   ASSERT(Size() == from->Size());
   int object_size = Size();
-  for (int offset = kSize; offset < object_size;  offset += kPointerSize) {
+  for (int offset = kHeaderSize;
+       offset < object_size;
+       offset += kPointerSize) {
     Object* value = READ_FIELD(from, offset);
     // Note: WRITE_FIELD does not update the write barrier.
     WRITE_FIELD(this, offset, value);
@@ -848,6 +861,8 @@
       return JSValue::kSize;
     case JS_ARRAY_TYPE:
       return JSValue::kSize;
+    case JS_REGEXP_TYPE:
+      return JSValue::kSize;
     case JS_OBJECT_TYPE:
       return JSObject::kHeaderSize;
     default:
@@ -885,7 +900,7 @@
 
 
 void Struct::InitializeBody(int object_size) {
-  for (int offset = kSize; offset < object_size; offset += kPointerSize) {
+  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
     WRITE_FIELD(this, offset, Heap::undefined_value());
   }
 }
@@ -977,6 +992,13 @@
 }
 
 
+void FixedArray::set_null(int index) {
+  ASSERT(index >= 0 && index < this->length());
+  ASSERT(!Heap::InNewSpace(Heap::null_value()));
+  WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
+}
+
+
 void FixedArray::set_the_hole(int index) {
   ASSERT(index >= 0 && index < this->length());
   ASSERT(!Heap::InNewSpace(Heap::the_hole_value()));
@@ -1095,6 +1117,7 @@
 CAST_ACCESSOR(Dictionary)
 CAST_ACCESSOR(SymbolTable)
 CAST_ACCESSOR(CompilationCacheTable)
+CAST_ACCESSOR(MapCache)
 CAST_ACCESSOR(String)
 CAST_ACCESSOR(SeqString)
 CAST_ACCESSOR(AsciiString)
@@ -1117,6 +1140,7 @@
 CAST_ACCESSOR(JSBuiltinsObject)
 CAST_ACCESSOR(Code)
 CAST_ACCESSOR(JSArray)
+CAST_ACCESSOR(JSRegExp)
 CAST_ACCESSOR(Proxy)
 CAST_ACCESSOR(ByteArray)
 CAST_ACCESSOR(Struct)
@@ -1191,7 +1215,12 @@
 
 
 void String::TryFlatten() {
-  Flatten();
+  // We don't need to flatten strings that are already flat.  Since this code
+  // is inlined, it can be helpful in the flat case to not call out to Flatten.
+  StringRepresentationTag str_type = representation_tag();
+  if (str_type != kSeqStringTag && str_type != kExternalStringTag) {
+    Flatten();
+  }
 }
 
 
@@ -1993,6 +2022,20 @@
 ACCESSORS(JSArray, length, Object, kLengthOffset)
 
 
+ACCESSORS(JSRegExp, data, Object, kDataOffset)
+ACCESSORS(JSRegExp, type, Object, kTypeOffset)
+
+
+JSRegExp::Type JSRegExp::type_tag() {
+  return static_cast<JSRegExp::Type>(Smi::cast(type())->value());
+}
+
+
+void JSRegExp::set_type_tag(JSRegExp::Type value) {
+  set_type(Smi::FromInt(value));
+}
+
+
 bool JSObject::HasFastElements() {
   return !elements()->IsDictionary();
 }
diff --git a/src/objects.cc b/src/objects.cc
index 471864a..748179d 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -620,6 +620,10 @@
       accumulator->Add("<JS array[%u]>", static_cast<uint32_t>(length));
       break;
     }
+    case JS_REGEXP_TYPE: {
+      accumulator->Add("<JS RegExp>");
+      break;
+    }
     case JS_FUNCTION_TYPE: {
       Object* fun_name = JSFunction::cast(this)->shared()->name();
       bool printed = false;
@@ -819,6 +823,7 @@
     case JS_OBJECT_TYPE:
     case JS_VALUE_TYPE:
     case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:
     case JS_FUNCTION_TYPE:
     case JS_GLOBAL_OBJECT_TYPE:
       reinterpret_cast<JSObject*>(this)->JSObjectIterateBody(object_size, v);
@@ -861,7 +866,7 @@
 
 
 void HeapObject::IterateStructBody(int object_size, ObjectVisitor* v) {
-  IteratePointers(v, HeapObject::kSize, object_size);
+  IteratePointers(v, HeapObject::kHeaderSize, object_size);
 }
 
 
@@ -2379,9 +2384,15 @@
   // First check whether we can update existing code cache without
   // extending it.
   int length = cache->length();
+  int deleted_index = -1;
   for (int i = 0; i < length; i += 2) {
     Object* key = cache->get(i);
+    if (key->IsNull()) {
+      if (deleted_index < 0) deleted_index = i;
+      continue;
+    }
     if (key->IsUndefined()) {
+      if (deleted_index >= 0) i = deleted_index;
       cache->set(i + 0, name);
       cache->set(i + 1, code);
       return this;
@@ -2395,6 +2406,14 @@
     }
   }
 
+  // Reached the end of the code cache.  If there were deleted
+  // elements, reuse the space for the first of them.
+  if (deleted_index >= 0) {
+    cache->set(deleted_index + 0, name);
+    cache->set(deleted_index + 1, code);
+    return this;
+  }
+
   // Extend the code cache with some new entries (at least one).
   int new_length = length + ((length >> 1) & ~1) + 2;
   ASSERT((new_length & 1) == 0);  // must be a multiple of two
@@ -2415,9 +2434,9 @@
   int length = cache->length();
   for (int i = 0; i < length; i += 2) {
     Object* key = cache->get(i);
-    if (key->IsUndefined()) {
-      return key;
-    }
+    // Skip deleted elements.
+    if (key->IsNull()) continue;
+    if (key->IsUndefined()) return key;
     if (name->Equals(String::cast(key))) {
       Code* code = Code::cast(cache->get(i + 1));
       if (code->flags() == flags) return code;
@@ -2440,8 +2459,11 @@
 void Map::RemoveFromCodeCache(int index) {
   FixedArray* array = code_cache();
   ASSERT(array->length() >= index && array->get(index)->IsCode());
-  array->set_undefined(index - 1);  // key
-  array->set_undefined(index);  // code
+  // Use null instead of undefined for deleted elements to distinguish
+  // deleted elements from unused elements.  This distinction is used
+  // when looking up in the cache and when updating the cache.
+  array->set_null(index - 1);  // key
+  array->set_null(index);  // code
 }
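
The null/undefined distinction introduced above amounts to a tombstone scheme
for the flat key/value code cache: undefined still marks never-used slots at
the tail, while null marks deleted entries that lookups skip and insertions
reuse. A standalone sketch of that convention, using a hypothetical FlatCache
container with std::string keys and int values standing in for name/code
pairs:

    #include <cstddef>
    #include <optional>
    #include <string>
    #include <vector>

    enum class SlotState { kUnused, kDeleted, kLive };

    struct Slot {
      SlotState state = SlotState::kUnused;
      std::string key;
      int value = 0;
    };

    class FlatCache {
     public:
      explicit FlatCache(std::size_t capacity) : slots_(capacity) {}

      std::optional<int> Get(const std::string& key) const {
        for (const Slot& slot : slots_) {
          if (slot.state == SlotState::kDeleted) continue;  // skip tombstones
          if (slot.state == SlotState::kUnused) break;      // end of used prefix
          if (slot.key == key) return slot.value;
        }
        return std::nullopt;
      }

      void Put(const std::string& key, int value) {
        int deleted_index = -1;
        for (std::size_t i = 0; i < slots_.size(); ++i) {
          Slot& slot = slots_[i];
          if (slot.state == SlotState::kDeleted) {
            if (deleted_index < 0) deleted_index = static_cast<int>(i);
            continue;
          }
          if (slot.state == SlotState::kUnused) {
            // End of the used prefix: prefer reusing an earlier deleted slot.
            Slot& target = deleted_index >= 0 ? slots_[deleted_index] : slot;
            target = Slot{SlotState::kLive, key, value};
            return;
          }
          if (slot.key == key) {  // live entry with the same key: update it
            slot.value = value;
            return;
          }
        }
        if (deleted_index >= 0) {
          slots_[deleted_index] = Slot{SlotState::kLive, key, value};
        } else {
          slots_.push_back(Slot{SlotState::kLive, key, value});  // grow
        }
      }

      void Remove(const std::string& key) {
        for (Slot& slot : slots_) {
          if (slot.state == SlotState::kLive && slot.key == key) {
            slot.state = SlotState::kDeleted;  // tombstone, not kUnused
            return;
          }
        }
      }

     private:
      std::vector<Slot> slots_;
    };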
 
 
@@ -3901,6 +3923,11 @@
 }
 
 
+Context* JSFunction::GlobalContextFromLiterals(FixedArray* literals) {
+  return Context::cast(literals->get(JSFunction::kLiteralGlobalContextIndex));
+}
+
+
 void Oddball::OddballIterateBody(ObjectVisitor* v) {
   // Assumes all Object* members are contiguously allocated!
   IteratePointers(v, kToStringOffset, kToNumberOffset + kPointerSize);
@@ -3987,13 +4014,13 @@
 
 
 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
-  ASSERT(is_code_target(rinfo->rmode()));
+  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
   VisitPointer(rinfo->target_object_address());
 }
 
 
 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
-  ASSERT(is_js_return(rinfo->rmode()) && rinfo->is_call_instruction());
+  ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) && rinfo->is_call_instruction());
   VisitPointer(rinfo->call_object_address());
 }
 
@@ -4014,7 +4041,9 @@
   }
 
   if (Debug::has_break_points()) {
-    for (RelocIterator it(this, RelocMask(js_return)); !it.done(); it.next()) {
+    for (RelocIterator it(this, RelocInfo::ModeMask(RelocInfo::JS_RETURN));
+         !it.done();
+         it.next()) {
       if (it.rinfo()->is_call_instruction()) {
         Address addr = it.rinfo()->call_address();
         ASSERT(addr != NULL);
@@ -4032,23 +4061,24 @@
   v->BeginCodeIteration(this);
 
   int mode_mask = RelocInfo::kCodeTargetMask |
-                  RelocMask(embedded_object) |
-                  RelocMask(external_reference) |
-                  RelocMask(js_return) |
-                  RelocMask(runtime_entry);
+                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
+                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
 
   for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
-    RelocMode rmode = it.rinfo()->rmode();
-    if (rmode == embedded_object) {
+    RelocInfo::Mode rmode = it.rinfo()->rmode();
+    if (rmode == RelocInfo::EMBEDDED_OBJECT) {
       v->VisitPointer(it.rinfo()->target_object_address());
-    } else if (is_code_target(rmode)) {
+    } else if (RelocInfo::IsCodeTarget(rmode)) {
       v->VisitCodeTarget(it.rinfo());
-    } else if (rmode == external_reference) {
+    } else if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
       v->VisitExternalReference(it.rinfo()->target_reference_address());
     } else if (Debug::has_break_points() &&
-               is_js_return(rmode) && it.rinfo()->is_call_instruction()) {
+               RelocInfo::IsJSReturn(rmode) &&
+               it.rinfo()->is_call_instruction()) {
       v->VisitDebugTarget(it.rinfo());
-    } else if (rmode == runtime_entry) {
+    } else if (rmode == RelocInfo::RUNTIME_ENTRY) {
       v->VisitRuntimeEntry(it.rinfo());
     }
   }
@@ -4073,7 +4103,9 @@
   }
 
   if (Debug::has_break_points()) {
-    for (RelocIterator it(this, RelocMask(js_return)); !it.done(); it.next()) {
+    for (RelocIterator it(this, RelocInfo::ModeMask(RelocInfo::JS_RETURN));
+         !it.done();
+         it.next()) {
       if (it.rinfo()->is_call_instruction()) {
         Code* code = reinterpret_cast<Code*>(it.rinfo()->call_object());
         ASSERT((code != NULL) && code->IsHeapObject());
@@ -4113,14 +4145,14 @@
   // unbox handles and relocate
   int delta = instruction_start() - desc.buffer;
   int mode_mask = RelocInfo::kCodeTargetMask |
-                  RelocMask(embedded_object) |
+                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                   RelocInfo::kApplyMask;
   for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
-    RelocMode mode = it.rinfo()->rmode();
-    if (mode == embedded_object) {
+    RelocInfo::Mode mode = it.rinfo()->rmode();
+    if (mode == RelocInfo::EMBEDDED_OBJECT) {
       Object** p = reinterpret_cast<Object**>(it.rinfo()->target_object());
       it.rinfo()->set_target_object(*p);
-    } else if (is_code_target(mode)) {
+    } else if (RelocInfo::IsCodeTarget(mode)) {
       // rewrite code handles in inline cache targets to direct
       // pointers to the first instruction in the code object
       Object** p = reinterpret_cast<Object**>(it.rinfo()->target_object());
@@ -4140,16 +4172,26 @@
 // source for this function is found.
 int Code::SourcePosition(Address pc) {
   int distance = kMaxInt;
-  int position = kNoPosition;  // Initially no position found.
+  int position = RelocInfo::kNoPosition;  // Initially no position found.
   // Run through all the relocation info to find the best matching source
   // position. All the code needs to be considered as the sequence of the
   // instructions in the code does not necessarily follow the same order as the
   // source.
   RelocIterator it(this, RelocInfo::kPositionMask);
   while (!it.done()) {
-    if (it.rinfo()->pc() < pc && (pc - it.rinfo()->pc()) < distance) {
-      position = it.rinfo()->data();
-      distance = pc - it.rinfo()->pc();
+    // Only look at positions recorded before the current pc.
+    if (it.rinfo()->pc() < pc) {
+      // Get position and distance.
+      int dist = pc - it.rinfo()->pc();
+      int pos = it.rinfo()->data();
+      // If this position is closer than the current candidate or if it has the
+      // same distance as the current candidate and the position is higher, then
+      // this position is the new candidate.
+      if ((dist < distance) ||
+          (dist == distance && pos > position)) {
+        position = pos;
+        distance = dist;
+      }
     }
     it.next();
   }
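
The new candidate selection in Code::SourcePosition boils down to: among
position entries recorded at addresses below pc, take the one with the
smallest distance, breaking distance ties in favor of the higher source
position. A minimal sketch of that rule, with a hypothetical PositionEntry
type in place of the reloc iterator:

    #include <climits>
    #include <vector>

    struct PositionEntry {
      int address;   // stand-in for the reloc info pc
      int position;  // source position recorded at that address
    };

    int BestSourcePosition(const std::vector<PositionEntry>& entries, int pc) {
      int best_position = -1;  // stand-in for "no position found"
      int best_distance = INT_MAX;
      for (const PositionEntry& entry : entries) {
        if (entry.address >= pc) continue;  // only addresses before pc
        int distance = pc - entry.address;
        if (distance < best_distance ||
            (distance == best_distance && entry.position > best_position)) {
          best_distance = distance;
          best_position = entry.position;
        }
      }
      return best_position;
    }
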
@@ -4167,7 +4209,7 @@
   int statement_position = 0;
   RelocIterator it(this, RelocInfo::kPositionMask);
   while (!it.done()) {
-    if (is_statement_position(it.rinfo()->rmode())) {
+    if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
       int p = it.rinfo()->data();
       if (statement_position < p && p <= position) {
         statement_position = p;
@@ -5496,12 +5538,12 @@
 
   Object* obj = Allocate(nof * 2);
   if (obj->IsFailure()) return obj;
-  HashTable* dict = HashTable::cast(obj);
-  WriteBarrierMode mode = dict->GetWriteBarrierMode();
+  HashTable* table = HashTable::cast(obj);
+  WriteBarrierMode mode = table->GetWriteBarrierMode();
 
   // Copy prefix to new array.
   for (int i = kPrefixStartIndex; i < kPrefixStartIndex + prefix_size; i++) {
-    dict->set(i, get(i), mode);
+    table->set(i, get(i), mode);
   }
   // Rehash the elements.
   uint32_t (*Hash)(Object* key) = key->GetHashFunction();
@@ -5510,14 +5552,14 @@
     Object* key = get(from_index);
     if (IsKey(key)) {
       uint32_t insertion_index =
-          EntryToIndex(dict->FindInsertionEntry(key, Hash(key)));
+          EntryToIndex(table->FindInsertionEntry(key, Hash(key)));
       for (int j = 0; j < element_size; j++) {
-        dict->set(insertion_index + j, get(from_index + j), mode);
+        table->set(insertion_index + j, get(from_index + j), mode);
       }
     }
   }
-  dict->SetNumberOfElements(NumberOfElements());
-  return dict;
+  table->SetNumberOfElements(NumberOfElements());
+  return table;
 }
 
 
@@ -5619,6 +5661,70 @@
 }
 
 
+// SymbolsKey used for HashTable where key is array of symbols.
+class SymbolsKey : public HashTableKey {
+ public:
+  explicit SymbolsKey(FixedArray* symbols) {
+    symbols_ = symbols;
+  }
+
+  bool IsMatch(Object* other) {
+    if (!other->IsFixedArray()) return false;
+    FixedArray* o = FixedArray::cast(other);
+    int len = symbols_->length();
+    if (o->length() != len) return false;
+    for (int i = 0; i < len; i++) {
+      if (o->get(i) != symbols_->get(i)) return false;
+    }
+    return true;
+  }
+
+  uint32_t Hash() { return SymbolsHash(symbols_); }
+
+  HashFunction GetHashFunction() { return SymbolsHash; }
+
+  Object* GetObject() { return symbols_; }
+
+  static uint32_t SymbolsHash(Object* obj) {
+    FixedArray* symbols_ = FixedArray::cast(obj);
+    int len = symbols_->length();
+    uint32_t hash = 0;
+    for (int i = 0; i < len; i++) {
+      hash ^= String::cast(symbols_->get(i))->Hash();
+    }
+    return hash;
+  }
+
+  bool IsStringKey() { return false; }
+
+  FixedArray* symbols_;
+};
+
+Object* MapCache::Lookup(FixedArray* array) {
+  SymbolsKey key(array);
+  int entry = FindEntry(&key);
+  if (entry != -1) {
+    return get(EntryToIndex(entry) + 1);
+  } else {
+    return Heap::undefined_value();
+  }
+}
+
+
+Object* MapCache::Put(FixedArray* array, Map* value) {
+  SymbolsKey key(array);
+  Object* obj = EnsureCapacity(1, &key);
+  if (obj->IsFailure()) return obj;
+
+  MapCache* cache = reinterpret_cast<MapCache*>(obj);
+  int entry = cache->FindInsertionEntry(array, key.Hash());
+  cache->set(EntryToIndex(entry), array);
+  cache->set(EntryToIndex(entry) + 1, value);
+  cache->ElementAdded();
+  return cache;
+}
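
A standalone sketch of the SymbolsKey hashing scheme above, using plain STL
types with interned symbols modeled as pointers to unique strings: the key is
an ordered array of symbols, the hash is the XOR of the per-symbol hashes, and
equality compares the arrays element-wise by symbol identity. XOR makes the
hash order-insensitive, so IsMatch, not the hash, is what distinguishes
permutations.

    #include <cstddef>
    #include <functional>
    #include <string>
    #include <unordered_map>
    #include <vector>

    using Symbol = const std::string*;       // interned: identity == equality
    using SymbolList = std::vector<Symbol>;  // ordered property names

    struct SymbolListHash {
      std::size_t operator()(const SymbolList& symbols) const {
        std::size_t hash = 0;
        for (Symbol s : symbols) hash ^= std::hash<std::string>()(*s);
        return hash;  // XOR combine, mirroring SymbolsKey::SymbolsHash
      }
    };

    struct SymbolListEq {
      bool operator()(const SymbolList& a, const SymbolList& b) const {
        if (a.size() != b.size()) return false;
        for (std::size_t i = 0; i < a.size(); ++i) {
          if (a[i] != b[i]) return false;  // pointer identity comparison
        }
        return true;
      }
    };

    // Maps an ordered list of property-name symbols to a canonical map id.
    using LiteralMapCache =
        std::unordered_map<SymbolList, int, SymbolListHash, SymbolListEq>;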
+
+
 Object* Dictionary::Allocate(int at_least_space_for) {
   Object* obj = DictionaryBase::Allocate(at_least_space_for);
   // Initialize the next enumeration index.
diff --git a/src/objects.h b/src/objects.h
index 0957a9f..85a8122 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -43,6 +43,7 @@
 //     - HeapObject   (superclass for everything allocated in the heap)
 //       - JSObject
 //         - JSArray
+//         - JSRegExp
 //         - JSFunction
 //         - GlobalObject
 //           - JSGlobalObject
@@ -263,6 +264,7 @@
   V(JS_GLOBAL_OBJECT_TYPE)                      \
   V(JS_BUILTINS_OBJECT_TYPE)                    \
   V(JS_ARRAY_TYPE)                              \
+  V(JS_REGEXP_TYPE)                             \
                                                 \
   V(JS_FUNCTION_TYPE)                           \
 
@@ -517,6 +519,7 @@
   JS_GLOBAL_OBJECT_TYPE,
   JS_BUILTINS_OBJECT_TYPE,
   JS_ARRAY_TYPE,
+  JS_REGEXP_TYPE,
 
   JS_FUNCTION_TYPE,
 
@@ -528,7 +531,7 @@
   // function objects are not counted as objects, even though they are
   // implemented as such; only values whose typeof is "object" are included.
   FIRST_JS_OBJECT_TYPE = JS_VALUE_TYPE,
-  LAST_JS_OBJECT_TYPE = JS_ARRAY_TYPE
+  LAST_JS_OBJECT_TYPE = JS_REGEXP_TYPE
 };
 
 
@@ -611,10 +614,12 @@
   inline bool IsProxy();
   inline bool IsBoolean();
   inline bool IsJSArray();
+  inline bool IsJSRegExp();
   inline bool IsHashTable();
   inline bool IsDictionary();
   inline bool IsSymbolTable();
   inline bool IsCompilationCacheTable();
+  inline bool IsMapCache();
   inline bool IsPrimitive();
   inline bool IsGlobalObject();
   inline bool IsJSGlobalObject();
@@ -700,7 +705,7 @@
   static Object* cast(Object* value) { return value; }
 
   // Layout description.
-  static const int kSize = 0;  // Object does not take up any space.
+  static const int kHeaderSize = 0;  // Object does not take up any space.
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(Object);
@@ -1042,8 +1047,8 @@
 
   // Layout description.
   // First field in a heap object is map.
-  static const int kMapOffset = Object::kSize;
-  static const int kSize = kMapOffset + kPointerSize;
+  static const int kMapOffset = Object::kHeaderSize;
+  static const int kHeaderSize = kMapOffset + kPointerSize;
 
  protected:
   // helpers for calling an ObjectVisitor to iterate over pointers in the
@@ -1081,7 +1086,7 @@
 #endif
 
   // Layout description.
-  static const int kValueOffset = HeapObject::kSize;
+  static const int kValueOffset = HeapObject::kHeaderSize;
   static const int kSize = kValueOffset + kDoubleSize;
 
  private:
@@ -1371,7 +1376,7 @@
   static const int kMaxFastProperties = 8;
 
   // Layout description.
-  static const int kPropertiesOffset = HeapObject::kSize;
+  static const int kPropertiesOffset = HeapObject::kHeaderSize;
   static const int kElementsOffset = kPropertiesOffset + kPointerSize;
   static const int kHeaderSize = kElementsOffset + kPointerSize;
 
@@ -1423,7 +1428,7 @@
   static inline bool IndexFromObject(Object* object, uint32_t* index);
 
   // Layout descriptor.
-  static const int kLengthOffset = HeapObject::kSize;
+  static const int kLengthOffset = HeapObject::kHeaderSize;
   static const int kHeaderSize = kLengthOffset + kIntSize;
 
  private:
@@ -1443,6 +1448,7 @@
 
   // Setters for frequently used oddballs located in old space.
   inline void set_undefined(int index);
+  inline void set_null(int index);
   inline void set_the_hole(int index);
 
   // Setter that skips the write barrier if mode is SKIP_WRITE_BARRIER.
@@ -1831,6 +1837,22 @@
 };
 
 
+// MapCache.
+//
+// Maps a key, which is a fixed array of symbols, to a map.
+// Used to canonicalize maps for object literals.
+class MapCache: public HashTable<0, 2> {
+ public:
+  // Find the cached value for a symbol-array key; undefined if absent.
+  Object* Lookup(FixedArray* key);
+  Object* Put(FixedArray* key, Map* value);
+  static inline MapCache* cast(Object* obj);
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(MapCache);
+};
+
+
 // Dictionary for keeping properties and elements in slow case.
 //
 // One element in the prefix is used for storing non-element
@@ -2179,7 +2201,7 @@
 #endif
 
   // Layout description.
-  static const int kInstructionSizeOffset = HeapObject::kSize;
+  static const int kInstructionSizeOffset = HeapObject::kHeaderSize;
   static const int kRelocationSizeOffset = kInstructionSizeOffset + kIntSize;
   static const int kSInfoSizeOffset = kRelocationSizeOffset + kIntSize;
   static const int kFlagsOffset = kSInfoSizeOffset + kIntSize;
@@ -2359,7 +2381,7 @@
 #endif
 
   // Layout description.
-  static const int kInstanceAttributesOffset = HeapObject::kSize;
+  static const int kInstanceAttributesOffset = HeapObject::kHeaderSize;
   static const int kPrototypeOffset = kInstanceAttributesOffset + kIntSize;
   static const int kConstructorOffset = kPrototypeOffset + kPointerSize;
   static const int kInstanceDescriptorsOffset =
@@ -2434,7 +2456,7 @@
   void ScriptVerify();
 #endif
 
-  static const int kSourceOffset = HeapObject::kSize;
+  static const int kSourceOffset = HeapObject::kHeaderSize;
   static const int kNameOffset = kSourceOffset + kPointerSize;
   static const int kLineOffsetOffset = kNameOffset + kPointerSize;
   static const int kColumnOffsetOffset = kLineOffsetOffset + kPointerSize;
@@ -2546,7 +2568,7 @@
   static const int kDontAdaptArgumentsSentinel = -1;
 
   // Layout description.
-  static const int kNameOffset = HeapObject::kSize;
+  static const int kNameOffset = HeapObject::kHeaderSize;
   static const int kCodeOffset = kNameOffset + kPointerSize;
   static const int kLengthOffset = kCodeOffset + kPointerSize;
   static const int kFormalParameterCountOffset = kLengthOffset + kIntSize;
@@ -2666,6 +2688,9 @@
   // Returns the number of allocated literals.
   int NumberOfLiterals();
 
+  // Retrieve the global context from a function's literal array.
+  static Context* GlobalContextFromLiterals(FixedArray* literals);
+
   // Layout descriptors.
   static const int kPrototypeOrInitialMapOffset = JSObject::kHeaderSize;
   static const int kSharedFunctionInfoOffset =
@@ -2675,11 +2700,8 @@
   static const int kSize = kLiteralsOffset + kPointerSize;
 
   // Layout of the literals array.
-  static const int kLiteralsPrefixSize = 3;
-  static const int kLiteralObjectFunctionIndex = 0;
-  static const int kLiteralRegExpFunctionIndex = 1;
-  static const int kLiteralArrayFunctionIndex = 2;
-
+  static const int kLiteralsPrefixSize = 1;
+  static const int kLiteralGlobalContextIndex = 0;
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSFunction);
 };
@@ -2787,6 +2809,31 @@
 };
 
 
+// Regular expressions
+class JSRegExp: public JSObject {
+ public:
+  enum Type { JSCRE, INDEX_OF };
+
+  inline Type type_tag();
+  inline void set_type_tag(Type value);
+
+  DECL_ACCESSORS(type, Object)
+  DECL_ACCESSORS(data, Object)
+
+  static inline JSRegExp* cast(Object* obj);
+
+  // Dispatched behavior.
+#ifdef DEBUG
+  void JSRegExpPrint();
+  void JSRegExpVerify();
+#endif
+
+  static const int kTypeOffset = JSObject::kHeaderSize;
+  static const int kDataOffset = kTypeOffset + kIntSize;
+  static const int kSize = kDataOffset + kIntSize;
+};
+
+
 enum AllowNullsFlag {ALLOW_NULLS, DISALLOW_NULLS};
 enum RobustnessFlag {ROBUST_STRING_TRAVERSAL, FAST_STRING_TRAVERSAL};
 
@@ -2923,7 +2970,7 @@
   inline bool IsFlat();
 
   // Layout description.
-  static const int kLengthOffset = HeapObject::kSize;
+  static const int kLengthOffset = HeapObject::kHeaderSize;
   static const int kSize = kLengthOffset + kIntSize;
 
   // Limits on sizes of different types of strings.
@@ -3343,7 +3390,7 @@
   Object* Initialize(const char* to_string, Object* to_number);
 
   // Layout description.
-  static const int kToStringOffset = HeapObject::kSize;
+  static const int kToStringOffset = HeapObject::kHeaderSize;
   static const int kToNumberOffset = kToStringOffset + kPointerSize;
   static const int kSize = kToNumberOffset + kPointerSize;
 
@@ -3373,7 +3420,7 @@
 
   // Layout description.
 
-  static const int kProxyOffset = HeapObject::kSize;
+  static const int kProxyOffset = HeapObject::kHeaderSize;
   static const int kSize = kProxyOffset + kPointerSize;
 
  private:
@@ -3456,7 +3503,7 @@
   void AccessorInfoVerify();
 #endif
 
-  static const int kGetterOffset = HeapObject::kSize;
+  static const int kGetterOffset = HeapObject::kHeaderSize;
   static const int kSetterOffset = kGetterOffset + kPointerSize;
   static const int kDataOffset = kSetterOffset + kPointerSize;
   static const int kNameOffset = kDataOffset + kPointerSize;
@@ -3486,7 +3533,7 @@
   void AccessCheckInfoVerify();
 #endif
 
-  static const int kNamedCallbackOffset   = HeapObject::kSize;
+  static const int kNamedCallbackOffset   = HeapObject::kHeaderSize;
   static const int kIndexedCallbackOffset = kNamedCallbackOffset + kPointerSize;
   static const int kDataOffset = kIndexedCallbackOffset + kPointerSize;
   static const int kSize = kDataOffset + kPointerSize;
@@ -3512,7 +3559,7 @@
   void InterceptorInfoVerify();
 #endif
 
-  static const int kGetterOffset = HeapObject::kSize;
+  static const int kGetterOffset = HeapObject::kHeaderSize;
   static const int kSetterOffset = kGetterOffset + kPointerSize;
   static const int kQueryOffset = kSetterOffset + kPointerSize;
   static const int kDeleterOffset = kQueryOffset + kPointerSize;
@@ -3537,7 +3584,7 @@
   void CallHandlerInfoVerify();
 #endif
 
-  static const int kCallbackOffset = HeapObject::kSize;
+  static const int kCallbackOffset = HeapObject::kHeaderSize;
   static const int kDataOffset = kCallbackOffset + kPointerSize;
   static const int kSize = kDataOffset + kPointerSize;
 
@@ -3555,7 +3602,7 @@
   void TemplateInfoVerify();
 #endif
 
-  static const int kTagOffset          = HeapObject::kSize;
+  static const int kTagOffset          = HeapObject::kHeaderSize;
   static const int kPropertyListOffset = kTagOffset + kPointerSize;
   static const int kHeaderSize         = kPropertyListOffset + kPointerSize;
  protected:
@@ -3656,7 +3703,7 @@
   void SignatureInfoVerify();
 #endif
 
-  static const int kReceiverOffset = Struct::kSize;
+  static const int kReceiverOffset = Struct::kHeaderSize;
   static const int kArgsOffset     = kReceiverOffset + kPointerSize;
   static const int kSize           = kArgsOffset + kPointerSize;
 
@@ -3676,7 +3723,7 @@
   void TypeSwitchInfoVerify();
 #endif
 
-  static const int kTypesOffset = Struct::kSize;
+  static const int kTypesOffset = Struct::kHeaderSize;
   static const int kSize        = kTypesOffset + kPointerSize;
 };
 
@@ -3722,7 +3769,7 @@
   void DebugInfoVerify();
 #endif
 
-  static const int kSharedFunctionInfoIndex = Struct::kSize;
+  static const int kSharedFunctionInfoIndex = Struct::kHeaderSize;
   static const int kOriginalCodeIndex = kSharedFunctionInfoIndex + kPointerSize;
   static const int kPatchedCodeIndex = kOriginalCodeIndex + kPointerSize;
   static const int kActiveBreakPointsCountIndex =
@@ -3775,7 +3822,7 @@
   void BreakPointInfoVerify();
 #endif
 
-  static const int kCodePositionIndex = Struct::kSize;
+  static const int kCodePositionIndex = Struct::kHeaderSize;
   static const int kSourcePositionIndex = kCodePositionIndex + kPointerSize;
   static const int kStatementPositionIndex =
       kSourcePositionIndex + kPointerSize;
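To summarize the objects.h change above: the literals array now reserves a single prefix slot holding the global context, and the object, regexp and array constructors are fetched from that context on demand. A minimal sketch of the idea with stand-in types (ContextFromLiterals and the structs below are hypothetical, not the real V8 classes):

    #include <cassert>
    #include <cstdio>
    #include <vector>

    struct Function { const char* name; };

    struct GlobalContext {
      Function* object_function;
      Function* regexp_function;
      Function* array_function;
    };

    // One prefix slot instead of three per-constructor slots.
    static const int kLiteralsPrefixSize = 1;
    static const int kLiteralGlobalContextIndex = 0;

    // Analogous in spirit to JSFunction::GlobalContextFromLiterals.
    static GlobalContext* ContextFromLiterals(const std::vector<void*>& literals) {
      return static_cast<GlobalContext*>(literals[kLiteralGlobalContextIndex]);
    }

    int main() {
      Function object_fun = {"Object"};
      Function regexp_fun = {"RegExp"};
      Function array_fun = {"Array"};
      GlobalContext context = {&object_fun, &regexp_fun, &array_fun};

      // Literal boilerplates would follow the one-slot prefix.
      std::vector<void*> literals(kLiteralsPrefixSize + 2);
      literals[kLiteralGlobalContextIndex] = &context;

      // Any per-type constructor is a single indirection away.
      std::printf("%s\n", ContextFromLiterals(literals)->array_function->name);
      assert(ContextFromLiterals(literals)->regexp_function == &regexp_fun);
      return 0;
    }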
diff --git a/src/parser.cc b/src/parser.cc
index b385f16..c39b4b6 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -811,7 +811,7 @@
 
     FunctionLiteralType type = is_expression ? EXPRESSION : DECLARATION;
     bool ok = true;
-    result = ParseFunctionLiteral(name, kNoPosition, type, &ok);
+    result = ParseFunctionLiteral(name, RelocInfo::kNoPosition, type, &ok);
     // Make sure the results agree.
     ASSERT(ok == (result != NULL));
     // The only errors should be stack overflows.
@@ -1148,7 +1148,7 @@
       NEW(FunctionBoilerplateLiteral(boilerplate));
   VariableProxy* var = Declare(name, Variable::VAR, NULL, true, CHECK_OK);
   return NEW(ExpressionStatement(
-                 new Assignment(Token::INIT_VAR, var, lit, kNoPosition)));
+      new Assignment(Token::INIT_VAR, var, lit, RelocInfo::kNoPosition)));
 }
 
 
@@ -2689,7 +2689,8 @@
           if (peek() == Token::IDENTIFIER) {
             Handle<String> name = ParseIdentifier(CHECK_OK);
             FunctionLiteral* value =
-                ParseFunctionLiteral(name, kNoPosition, DECLARATION, CHECK_OK);
+                ParseFunctionLiteral(name, RelocInfo::kNoPosition,
+                                     DECLARATION, CHECK_OK);
             ObjectLiteral::Property* property =
                 NEW(ObjectLiteral::Property(is_getter, value));
             if (IsBoilerplateProperty(property))
@@ -2737,8 +2738,7 @@
         NEW(ObjectLiteral::Property(key, value));
 
     // Count CONSTANT or COMPUTED properties to maintain the enumeration order.
-    if (IsBoilerplateProperty(property))
-      number_of_boilerplate_properties++;
+    if (IsBoilerplateProperty(property)) number_of_boilerplate_properties++;
     properties.Add(property);
 
     // TODO(1240767): Consider allowing trailing comma.
@@ -2885,7 +2885,8 @@
       fproxy->BindTo(fvar);
       body.Add(new ExpressionStatement(
                    new Assignment(Token::INIT_VAR, fproxy,
-                                  NEW(ThisFunction()), kNoPosition)));
+                                  NEW(ThisFunction()),
+                                  RelocInfo::kNoPosition)));
     }
 
     // Determine if the function will be lazily compiled. The mode can
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index 9589bf9..46ca7dc 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -38,11 +38,12 @@
 // executable. Otherwise, OS raises an exception when executing code
 // in that page.
 #include <sys/types.h>  // mmap & munmap
-#include <sys/mman.h>  // mmap & munmap
-#include <sys/stat.h>  // open
+#include <sys/mman.h>   // mmap & munmap
+#include <sys/stat.h>   // open
 #include <sys/fcntl.h>  // open
-#include <unistd.h>  // getpagesize
-#include <execinfo.h>  // backtrace, backtrace_symbols
+#include <unistd.h>     // getpagesize
+#include <execinfo.h>   // backtrace, backtrace_symbols
+#include <strings.h>    // index
 #include <errno.h>
 #include <stdarg.h>
 
@@ -194,7 +195,16 @@
 }
 
 
-double OS::nan_value() { return NAN; }
+double OS::nan_value() {
+  return NAN;
+}
+
+
+int OS::ActivationFrameAlignment() {
+  // No constraint on Linux.
+  return 0;
+}
+
 
 // We keep the lowest and highest addresses mapped as a quick way of
 // determining that pointers are outside the heap (used mostly in assertions
@@ -335,12 +345,13 @@
       if (result < 1) break;
     } while (buffer[bytes_read] != '\n');
     buffer[bytes_read] = 0;
-    // There are 56 chars to ignore at this point in the line.
-    if (bytes_read < 56) continue;
     // Ignore mappings that are not executable.
     if (buffer[3] != 'x') continue;
+    char* start_of_path = index(buffer, '/');
+    // There may be no filename in this line.  Skip to next.
+    if (start_of_path == NULL) continue;
     buffer[bytes_read] = 0;
-    LOG(SharedLibraryEvent(buffer + 56, start, end));
+    LOG(SharedLibraryEvent(start_of_path, start, end));
   }
   close(fd);
 #endif
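The platform-linux.cc hunk above stops assuming a fixed 56-character prefix in each /proc/self/maps line and instead locates the mapped file name by searching for the first '/'. A rough, self-contained sketch of that parsing approach (using standard strchr and sscanf rather than the read loop in the real code; the sample lines are made up):

    #include <cstdio>
    #include <cstring>

    // Returns the path portion of a /proc/self/maps line, or NULL if the
    // mapping is not executable or has no file name.
    static const char* ExecutableMappingPath(const char* line) {
      // Fields: start-end perms offset dev inode [path]
      unsigned start = 0, end = 0;
      char perms[5] = {0};
      if (std::sscanf(line, "%x-%x %4s", &start, &end, perms) != 3) return NULL;
      if (std::strchr(perms, 'x') == NULL) return NULL;  // not executable
      // There may be no file name on this line; the first '/' starts the path.
      return std::strchr(line, '/');
    }

    int main() {
      const char* lib = "b7f2d000-b7f44000 r-xp 00000000 08:01 131 /lib/ld-2.7.so";
      const char* anon = "b7f44000-b7f45000 rw-p 00000000 00:00 0";
      std::printf("%s\n", ExecutableMappingPath(lib));           // /lib/ld-2.7.so
      std::printf("%d\n", ExecutableMappingPath(anon) == NULL);  // prints 1
      return 0;
    }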
diff --git a/src/platform-macos.cc b/src/platform-macos.cc
index 54aad6a..e57f79d 100644
--- a/src/platform-macos.cc
+++ b/src/platform-macos.cc
@@ -300,7 +300,16 @@
 }
 
 
-double OS::nan_value() { return NAN; }
+double OS::nan_value() {
+  return NAN;
+}
+
+
+int OS::ActivationFrameAlignment() {
+  // OS X activation frames must be 16-byte aligned; see "Mac OS X ABI
+  // Function Call Guide".
+  return 16;
+}
 
 
 int OS::StackWalk(StackFrame* frames, int frames_size) {
diff --git a/src/platform-win32.cc b/src/platform-win32.cc
index 747f0a4..4fdbf78 100644
--- a/src/platform-win32.cc
+++ b/src/platform-win32.cc
@@ -1206,6 +1206,13 @@
   return *reinterpret_cast<const double*>(&nanval);
 }
 
+
+int OS::ActivationFrameAlignment() {
+  // No constraint on Windows.
+  return 0;
+}
+
+
 bool VirtualMemory::IsReserved() {
   return address_ != NULL;
 }
diff --git a/src/platform.h b/src/platform.h
index 5bb7f20..0e33fa8 100644
--- a/src/platform.h
+++ b/src/platform.h
@@ -217,6 +217,10 @@
   // Returns the double constant NAN
   static double nan_value();
 
+  // Returns the activation frame alignment constraint, or zero if the
+  // platform doesn't care.  Non-zero values are powers of two.
+  static int ActivationFrameAlignment();
+
  private:
   static const int msPerSecond = 1000;
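The new OS::ActivationFrameAlignment() hook declared above reports the platform's required activation frame alignment (16 bytes on OS X, zero where there is no constraint). A hedged sketch of how a caller might apply such a value when carving out a frame; AlignFrame and the address below are illustrative, not part of the V8 API:

    #include <cassert>
    #include <cstdio>
    #include <stdint.h>

    // Stand-in for OS::ActivationFrameAlignment(): zero means "no constraint",
    // any non-zero value is a power of two.
    static int ActivationFrameAlignment() {
    #if defined(__APPLE__)
      return 16;  // OS X requires 16-byte aligned activation frames.
    #else
      return 0;   // No constraint.
    #endif
    }

    // Rounds a prospective stack pointer down so the outgoing frame satisfies
    // the platform constraint (a no-op when there is none).
    static uintptr_t AlignFrame(uintptr_t sp) {
      int alignment = ActivationFrameAlignment();
      if (alignment <= 1) return sp;
      assert((alignment & (alignment - 1)) == 0);  // power of two
      return sp & ~static_cast<uintptr_t>(alignment - 1);
    }

    int main() {
      uintptr_t sp = 0xbeefcafe;
      std::printf("0x%lx\n", static_cast<unsigned long>(AlignFrame(sp)));
      return 0;
    }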
 
diff --git a/src/rewriter.cc b/src/rewriter.cc
index 6954b23..ebbc383 100644
--- a/src/rewriter.cc
+++ b/src/rewriter.cc
@@ -64,7 +64,8 @@
 
   Expression* SetResult(Expression* value) {
     result_assigned_ = true;
-    return new Assignment(Token::ASSIGN, result_, value, kNoPosition);
+    return new Assignment(Token::ASSIGN, result_, value,
+                          RelocInfo::kNoPosition);
   }
 
   // Node visitors.
diff --git a/src/runtime.cc b/src/runtime.cc
index c579a20..9b79145 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -97,6 +97,37 @@
 }
 
 
+static Handle<Map> ComputeObjectLiteralMap(
+    Handle<Context> context,
+    Handle<FixedArray> constant_properties,
+    bool &is_result_from_cache) {
+  if (FLAG_canonicalize_object_literal_maps) {
+    // First find prefix of consecutive symbol keys.
+    int number_of_properties = constant_properties->length()/2;
+    int number_of_symbol_keys = 0;
+    while ((number_of_symbol_keys < number_of_properties) &&
+           (constant_properties->get(number_of_symbol_keys*2)->IsSymbol())) {
+      number_of_symbol_keys++;
+    }
+    // Based on the number of symbol keys in the prefix we decide whether
+    // to use the map cache in the global context.
+    const int kMaxKeys = 10;
+    if ((number_of_symbol_keys == number_of_properties)
+        && (number_of_symbol_keys < kMaxKeys)) {
+      // Create the fixed array with the keys.
+      Handle<FixedArray> keys = Factory::NewFixedArray(number_of_symbol_keys);
+      for (int i = 0; i < number_of_symbol_keys; i++) {
+        keys->set(i, constant_properties->get(i*2));
+      }
+      is_result_from_cache = true;
+      return Factory::ObjectLiteralMapFromCache(context, keys);
+    }
+  }
+  is_result_from_cache = false;
+  return Handle<Map>(context->object_function()->initial_map());
+}
+
+
 static Object* Runtime_CreateObjectLiteralBoilerplate(Arguments args) {
   HandleScope scope;
   ASSERT(args.length() == 3);
@@ -104,21 +135,24 @@
   Handle<FixedArray> literals = args.at<FixedArray>(0);
   int literals_index = Smi::cast(args[1])->value();
   Handle<FixedArray> constant_properties = args.at<FixedArray>(2);
+  Handle<Context> context =
+      Handle<Context>(JSFunction::GlobalContextFromLiterals(*literals));
+
+  bool is_result_from_cache;
+  Handle<Map> map = ComputeObjectLiteralMap(context,
+                                            constant_properties,
+                                            is_result_from_cache);
 
   // Get the object function from the literals array.  This is the
   // object function from the context in which the function was
   // created.  We do not use the object function from the current
   // global context because this might be the object function from
   // another context which we should not have access to.
-  const int kObjectFunIndex = JSFunction::kLiteralObjectFunctionIndex;
-  Handle<JSFunction> constructor =
-      Handle<JSFunction>(JSFunction::cast(literals->get(kObjectFunIndex)));
-
-  Handle<JSObject> boilerplate = Factory::NewJSObject(constructor, TENURED);
-
+  Handle<JSObject> boilerplate = Factory::NewJSObjectFromMap(map);
   {  // Add the constant properties to the boilerplate.
     int length = constant_properties->length();
-    OptimizedObjectForAddingMultipleProperties opt(boilerplate, true);
+    OptimizedObjectForAddingMultipleProperties opt(boilerplate,
+                                                   !is_result_from_cache);
     for (int index = 0; index < length; index +=2) {
       Handle<Object> key(constant_properties->get(index+0));
       Handle<Object> value(constant_properties->get(index+1));
@@ -160,9 +194,8 @@
   ASSERT(args.length() == 2);
   CONVERT_CHECKED(FixedArray, elements, args[0]);
   CONVERT_CHECKED(FixedArray, literals, args[1]);
-  const int kArrayFunIndex = JSFunction::kLiteralArrayFunctionIndex;
-  JSFunction* constructor = JSFunction::cast(literals->get(kArrayFunIndex));
-
+  JSFunction* constructor =
+      JSFunction::GlobalContextFromLiterals(literals)->array_function();
   // Create the JSArray.
   Object* object = Heap::AllocateJSObject(constructor);
   if (object->IsFailure()) return object;
@@ -212,8 +245,8 @@
 static Object* Runtime_RegExpCompile(Arguments args) {
   HandleScope scope;  // create a new handle scope
   ASSERT(args.length() == 3);
-  CONVERT_CHECKED(JSValue, raw_re, args[0]);
-  Handle<JSValue> re(raw_re);
+  CONVERT_CHECKED(JSRegExp, raw_re, args[0]);
+  Handle<JSRegExp> re(raw_re);
   CONVERT_CHECKED(String, raw_pattern, args[1]);
   Handle<String> pattern(raw_pattern);
   CONVERT_CHECKED(String, raw_flags, args[2]);
@@ -665,8 +698,8 @@
 static Object* Runtime_RegExpExec(Arguments args) {
   HandleScope scope;
   ASSERT(args.length() == 3);
-  CONVERT_CHECKED(JSValue, raw_regexp, args[0]);
-  Handle<JSValue> regexp(raw_regexp);
+  CONVERT_CHECKED(JSRegExp, raw_regexp, args[0]);
+  Handle<JSRegExp> regexp(raw_regexp);
   CONVERT_CHECKED(String, raw_subject, args[1]);
   Handle<String> subject(raw_subject);
   Handle<Object> index(args[2]);
@@ -678,8 +711,8 @@
 static Object* Runtime_RegExpExecGlobal(Arguments args) {
   HandleScope scope;
   ASSERT(args.length() == 2);
-  CONVERT_CHECKED(JSValue, raw_regexp, args[0]);
-  Handle<JSValue> regexp(raw_regexp);
+  CONVERT_CHECKED(JSRegExp, raw_regexp, args[0]);
+  Handle<JSRegExp> regexp(raw_regexp);
   CONVERT_CHECKED(String, raw_subject, args[1]);
   Handle<String> subject(raw_subject);
   return *RegExpImpl::JsreExecGlobal(regexp, subject);
@@ -699,10 +732,9 @@
   // created.  We do not use the RegExp function from the current
   // global context because this might be the RegExp function from
   // another context which we should not have access to.
-  const int kRegexpFunIndex = JSFunction::kLiteralRegExpFunctionIndex;
   Handle<JSFunction> constructor =
-      Handle<JSFunction>(JSFunction::cast(literals->get(kRegexpFunIndex)));
-
+      Handle<JSFunction>(
+          JSFunction::GlobalContextFromLiterals(*literals)->regexp_function());
   // Compute the regular expression literal.
   bool has_pending_exception;
   Handle<Object> regexp =
@@ -726,6 +758,17 @@
 }
 
 
+static Object* Runtime_FunctionSetName(Arguments args) {
+  NoHandleAllocation ha;
+  ASSERT(args.length() == 2);
+
+  CONVERT_CHECKED(JSFunction, f, args[0]);
+  CONVERT_CHECKED(String, name, args[1]);
+  f->shared()->set_name(name);
+  return Heap::undefined_value();
+}
+
+
 static Object* Runtime_FunctionGetScript(Arguments args) {
   HandleScope scope;
   ASSERT(args.length() == 1);
@@ -828,12 +871,8 @@
       // Insert the global context in the literals array prefix.  This
       // is the context from which the object, regexp and array
       // functions used for literals will be fetched.
-      literals->set(JSFunction::kLiteralObjectFunctionIndex,
-                    context->global_context()->object_function());
-      literals->set(JSFunction::kLiteralRegExpFunctionIndex,
-                    context->global_context()->regexp_function());
-      literals->set(JSFunction::kLiteralArrayFunctionIndex,
-                    context->global_context()->array_function());
+      literals->set(JSFunction::kLiteralGlobalContextIndex,
+                    context->global_context());
     }
     target->set_literals(*literals);
   }
@@ -911,12 +950,12 @@
   CONVERT_CHECKED(String, pat, args[1]);
   Object* index = args[2];
 
-  int subject_length = sub->length();
-  int pattern_length = pat->length();
-
   sub->TryFlatten();
   pat->TryFlatten();
 
+  int subject_length = sub->length();
+  int pattern_length = pat->length();
+
   uint32_t start_index;
   if (!Array::IndexFromObject(index, &start_index)) return Smi::FromInt(-1);
   if (pattern_length == 0) return Smi::FromInt(start_index);
@@ -934,8 +973,23 @@
     return Smi::FromInt(-1);
   }
 
-  // For patterns with a length larger than one character we use the KMP
-  // algorithm.
+  // For small searches, KMP is not worth the setup overhead.
+  if (subject_length < 100) {
+    // We know the pattern is at least 2 characters long; cache the first
+    // character so the common case of a first-character mismatch stays fast.
+    uint16_t pattern_first_char = pat->Get(0);
+    for (int i = start_index; i + pattern_length <= subject_length; i++) {
+      if (sub->Get(i) != pattern_first_char) continue;
+
+      for (int j = 1; j < pattern_length; j++) {
+        if (pat->Get(j) != sub->Get(j + i)) break;
+        if (j == pattern_length - 1) return Smi::FromInt(i);
+      }
+    }
+    return Smi::FromInt(-1);
+  }
+
+  // For longer subjects we fall back to the KMP algorithm.
   //
   // Compute the 'next' table.
   int* next_table = NewArray<int>(pattern_length);
@@ -3207,6 +3261,7 @@
   args[0]->ShortPrint();
 #endif
   PrintF("\n");
+  Flush();
 
   return args[0];  // return TOS
 }
@@ -3345,10 +3400,11 @@
 
 static Object* Runtime_CompileString(Arguments args) {
   HandleScope scope;
-  ASSERT(args.length() == 2);
+  ASSERT(args.length() == 3);
   CONVERT_ARG_CHECKED(String, source, 0);
-  bool contextual = args[1]->IsTrue();
-  RUNTIME_ASSERT(contextual || args[1]->IsFalse());
+  CONVERT_ARG_CHECKED(Smi, line_offset, 1);
+  bool contextual = args[2]->IsTrue();
+  RUNTIME_ASSERT(contextual || args[2]->IsFalse());
 
   // Compute the eval context.
   Handle<Context> context;
@@ -3367,7 +3423,7 @@
   // Compile source string.
   bool is_global = context->IsGlobalContext();
   Handle<JSFunction> boilerplate =
-      Compiler::CompileEval(is_global, source);
+      Compiler::CompileEval(source, line_offset->value(), is_global);
   if (boilerplate.is_null()) return Failure::Exception();
   Handle<JSFunction> fun =
       Factory::NewFunctionFromBoilerplate(boilerplate, context);
@@ -3983,7 +4039,7 @@
                Smi::FromInt(info.NumberOfLocals()));
 
   // Add the source position.
-  if (position != kNoPosition) {
+  if (position != RelocInfo::kNoPosition) {
     details->set(kFrameDetailsSourcePositionIndex, Smi::FromInt(position));
   } else {
     details->set(kFrameDetailsSourcePositionIndex, Heap::undefined_value());
@@ -4130,7 +4186,7 @@
   // these functions.
   bool done = false;
   // The current candidate for the source position:
-  int target_start_position = kNoPosition;
+  int target_start_position = RelocInfo::kNoPosition;
   Handle<SharedFunctionInfo> target;
   // The current candidate for the last function in script:
   Handle<SharedFunctionInfo> last;
@@ -4145,7 +4201,7 @@
           // If the SharedFunctionInfo found has the requested script data and
           // contains the source position it is a candidate.
           int start_position = shared->function_token_position();
-          if (start_position == kNoPosition) {
+          if (start_position == RelocInfo::kNoPosition) {
             start_position = shared->start_position();
           }
           if (start_position <= position &&
@@ -4486,7 +4542,7 @@
       Factory::NewStringFromAscii(Vector<const char>(source_str,
                                                      source_str_length));
   Handle<JSFunction> boilerplate =
-      Compiler::CompileEval(context->IsGlobalContext(), function_source);
+      Compiler::CompileEval(function_source, 0, context->IsGlobalContext());
   if (boilerplate.is_null()) return Failure::Exception();
   Handle<JSFunction> compiled_function =
       Factory::NewFunctionFromBoilerplate(boilerplate, context);
@@ -4542,7 +4598,7 @@
   Handle<Context> context = Top::global_context();
 
   // Compile the source to be evaluated.
-  Handle<JSFunction> boilerplate(Compiler::CompileEval(true, source));
+  Handle<JSFunction> boilerplate(Compiler::CompileEval(source, 0, true));
   if (boilerplate.is_null()) return Failure::Exception();
   Handle<JSFunction> compiled_function =
       Handle<JSFunction>(Factory::NewFunctionFromBoilerplate(boilerplate,
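The ComputeObjectLiteralMap helper added above decides, per object literal, whether to reuse a map from the global context's cache, keyed by the literal's leading symbol property names (at most kMaxKeys of them). A toy sketch of the canonicalization idea, with std::map standing in for the context's map cache and plain strings standing in for symbols (none of these names are V8 API):

    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    // Toy stand-in for a map (hidden class) cache: literals with the same
    // property-name sequence end up sharing one map, so inline caches keyed
    // on the map can be reused across them.
    class LiteralMapCache {
     public:
      LiteralMapCache() : next_id_(0) {}

      // Returns a canonical map id for this exact sequence of property names,
      // creating a fresh one the first time the sequence is seen.
      int MapForKeys(const std::vector<std::string>& keys) {
        std::map<std::vector<std::string>, int>::iterator it = cache_.find(keys);
        if (it != cache_.end()) return it->second;
        int id = next_id_++;
        cache_[keys] = id;
        return id;
      }

     private:
      int next_id_;
      std::map<std::vector<std::string>, int> cache_;
    };

    int main() {
      LiteralMapCache cache;
      std::vector<std::string> a, b, c;
      a.push_back("x"); a.push_back("y");
      b.push_back("x"); b.push_back("y");  // same shape as a
      c.push_back("x"); c.push_back("z");  // different shape
      // Two literals with the same property names share one map id.
      std::printf("%d %d %d\n", cache.MapForKeys(a), cache.MapForKeys(b),
                  cache.MapForKeys(c));  // prints: 0 0 1
      return 0;
    }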
diff --git a/src/runtime.h b/src/runtime.h
index 794a5ca..d78341e 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -157,6 +157,7 @@
   F(FunctionSetLength, 2) \
   F(FunctionSetPrototype, 2) \
   F(FunctionGetName, 1) \
+  F(FunctionSetName, 2) \
   F(FunctionGetSourceCode, 1) \
   F(FunctionGetScript, 1) \
   F(FunctionGetScriptSourcePosition, 1) \
@@ -180,7 +181,7 @@
   F(NumberIsFinite, 1) \
   \
   /* Globals */ \
-  F(CompileString, 2) \
+  F(CompileString, 3) \
   F(CompileScript, 4) \
   F(GlobalPrint, 1) \
   \
diff --git a/src/scopes.cc b/src/scopes.cc
index 88a6820..4f84bbb 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -772,7 +772,7 @@
         var->rewrite_ =
           new Property(arguments_shadow_,
                        new Literal(Handle<Object>(Smi::FromInt(i))),
-                       kNoPosition);
+                       RelocInfo::kNoPosition);
         arguments_shadow->var_uses()->RecordUses(var->var_uses());
       }
     }
diff --git a/src/simulator-arm.cc b/src/simulator-arm.cc
index d76957f..321ab06 100644
--- a/src/simulator-arm.cc
+++ b/src/simulator-arm.cc
@@ -44,6 +44,15 @@
 using ::v8::internal::ReadLine;
 using ::v8::internal::DeleteArray;
 
+// This macro provides a platform-independent use of sscanf. The reason for
+// SScanF not being implemented in a platform-independent way through
+// ::v8::internal::OS in the same way as SNPrintF is that the Windows C Run-Time
+// Library does not provide vsscanf.
+#ifdef WIN32
+#define SScanF sscanf_s
+#else
+#define SScanF sscanf  // NOLINT
+#endif
 
 // The Debugger class is used by the simulator while debugging simulated ARM
 // code.
@@ -130,7 +139,7 @@
     }
     return true;
   } else {
-    return sscanf(desc, "%i", value) == 1;  // NOLINT
+    return SScanF(desc, "%i", value) == 1;
   }
   return false;
 }
@@ -215,7 +224,7 @@
     } else {
       // Use sscanf to parse the individual parts of the command line. At the
       // moment no command expects more than two parameters.
-      int args = sscanf(line,  // NOLINT
+      int args = SScanF(line,
                         "%" XSTR(COMMAND_SIZE) "s "
                         "%" XSTR(ARG_SIZE) "s "
                         "%" XSTR(ARG_SIZE) "s",
diff --git a/src/spaces.cc b/src/spaces.cc
index e8f460c..d1aaceb 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1706,7 +1706,7 @@
 // with ']'.  RelocIterator 'it' must point to a comment reloc info.
 static void CollectCommentStatistics(RelocIterator* it) {
   ASSERT(!it->done());
-  ASSERT(it->rinfo()->rmode() == comment);
+  ASSERT(it->rinfo()->rmode() == RelocInfo::COMMENT);
   const char* tmp = reinterpret_cast<const char*>(it->rinfo()->data());
   if (tmp[0] != '[') {
     // Not a nested comment; skip
@@ -1723,7 +1723,7 @@
     // All nested comments must be terminated properly, and therefore exit
     // from loop.
     ASSERT(!it->done());
-    if (it->rinfo()->rmode() == comment) {
+    if (it->rinfo()->rmode() == RelocInfo::COMMENT) {
       const char* const txt =
           reinterpret_cast<const char*>(it->rinfo()->data());
       flat_delta += it->rinfo()->pc() - prev_pc;
@@ -1753,7 +1753,7 @@
       int delta = 0;
       const byte* prev_pc = code->instruction_start();
       while (!it.done()) {
-        if (it.rinfo()->rmode() == comment) {
+        if (it.rinfo()->rmode() == RelocInfo::COMMENT) {
           delta += it.rinfo()->pc() - prev_pc;
           CollectCommentStatistics(&it);
           prev_pc = it.rinfo()->pc();
diff --git a/src/string.js b/src/string.js
index da9ba6a..d8105fd 100644
--- a/src/string.js
+++ b/src/string.js
@@ -332,6 +332,7 @@
 // ECMA-262 section 15.5.4.7
 %AddProperty($String.prototype, "indexOf", function(searchString /* position */) {  // length == 1
   var str = ToString(this);
+  var str_len = str.length;
   var searchStr = ToString(searchString);
   var index = 0;
   if (%_ArgumentsLength() > 1) {
@@ -339,7 +340,8 @@
     index = TO_INTEGER(arg1);
   }
   if (index < 0) index = 0;
-  if (index > str.length) index = str.length;
+  if (index > str_len) index = str_len;
+  if (searchStr.length + index > str_len) return -1;
   return %StringIndexOf(str, searchStr, index);
 }, DONT_ENUM);
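The string.js early-out above and the brute-force path added on the runtime side (%StringIndexOf) share the same shape: bail out when the pattern cannot fit, then scan while caching the pattern's first character, reserving KMP for long subjects. A standalone sketch of that fast path with plain std::string (the KMP fallback is omitted here):

    #include <cstdio>
    #include <string>

    // Returns the index of pattern in subject at or after start_index, or -1.
    // Mirrors the brute-force path: bail out early when the pattern cannot
    // fit, and compare the cached first character before checking the rest.
    static int SimpleIndexOf(const std::string& subject,
                             const std::string& pattern,
                             size_t start_index) {
      size_t subject_length = subject.size();
      size_t pattern_length = pattern.size();
      if (start_index > subject_length) start_index = subject_length;
      if (pattern_length == 0) return static_cast<int>(start_index);
      if (pattern_length + start_index > subject_length) return -1;  // early out

      char pattern_first_char = pattern[0];
      for (size_t i = start_index; i + pattern_length <= subject_length; i++) {
        if (subject[i] != pattern_first_char) continue;  // cheap rejection
        size_t j = 1;
        while (j < pattern_length && subject[i + j] == pattern[j]) j++;
        if (j == pattern_length) return static_cast<int>(i);
      }
      return -1;
    }

    int main() {
      std::printf("%d\n", SimpleIndexOf("hello world", "world", 0));   // 6
      std::printf("%d\n", SimpleIndexOf("hello world", "worlds", 0));  // -1
      return 0;
    }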
 
diff --git a/src/stub-cache-arm.cc b/src/stub-cache-arm.cc
index a31ff2f..3513c40 100644
--- a/src/stub-cache-arm.cc
+++ b/src/stub-cache-arm.cc
@@ -178,7 +178,7 @@
   __ pop(r1);
 
   // Tear down temporary frame.
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // Do a tail-call of the compiled function.
   __ Jump(r2);
@@ -232,7 +232,7 @@
   // Handle call cache miss.
   __ bind(&miss);
   Handle<Code> ic = ComputeCallMiss(arguments().immediate());
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(FIELD);
@@ -341,12 +341,13 @@
   // Jump to the cached code (tail call).
   Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  __ InvokeCode(code, expected, arguments(), code_target, JUMP_FUNCTION);
+  __ InvokeCode(code, expected, arguments(),
+                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
 
   // Handle call cache miss.
   __ bind(&miss);
   Handle<Code> ic = ComputeCallMiss(arguments().immediate());
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(CONSTANT_FUNCTION);
@@ -368,7 +369,7 @@
   // Handle call cache miss.
   __ bind(&miss);
   Handle<Code> ic = ComputeCallMiss(arguments().immediate());
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(INTERCEPTOR);
@@ -441,7 +442,7 @@
   __ bind(&miss);
   __ mov(r2, Operand(Handle<String>(name)));  // restore name
   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(transition == NULL ? FIELD : MAP_TRANSITION);
@@ -498,7 +499,7 @@
   __ bind(&miss);
   __ mov(r2, Operand(Handle<String>(name)));  // restore name
   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(CALLBACKS);
@@ -552,7 +553,7 @@
   __ bind(&miss);
   __ mov(r2, Operand(Handle<String>(name)));  // restore name
   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(INTERCEPTOR);
@@ -592,7 +593,7 @@
   __ bind(&miss);
   __ ldr(r0, MemOperand(sp));  // restore receiver
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(FIELD);
@@ -634,7 +635,7 @@
   // Handle load cache miss.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(CALLBACKS);
@@ -668,7 +669,7 @@
   // Handle load cache miss.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(CONSTANT_FUNCTION);
@@ -708,7 +709,7 @@
   // Handle load cache miss.
   __ bind(&miss);
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
-  __ Jump(ic, code_target);
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(INTERCEPTOR);
diff --git a/src/stub-cache-ia32.cc b/src/stub-cache-ia32.cc
index fb2ff11..220035b 100644
--- a/src/stub-cache-ia32.cc
+++ b/src/stub-cache-ia32.cc
@@ -401,7 +401,7 @@
   }
 
   Handle<Code> ic(code);
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 }
 
 
@@ -476,7 +476,7 @@
   __ CallRuntime(Runtime::kLazyCompile, 1);
   __ pop(edi);
 
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // Do a tail-call of the compiled function.
   __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
@@ -526,7 +526,7 @@
   // Handle call cache miss.
   __ bind(&miss);
   Handle<Code> ic = ComputeCallMiss(arguments().immediate());
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(FIELD);
@@ -631,12 +631,13 @@
   // Jump to the cached code (tail call).
   Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  __ InvokeCode(code, expected, arguments(), code_target, JUMP_FUNCTION);
+  __ InvokeCode(code, expected, arguments(),
+                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
 
   // Handle call cache miss.
   __ bind(&miss);
   Handle<Code> ic = ComputeCallMiss(arguments().immediate());
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(CONSTANT_FUNCTION);
@@ -687,7 +688,7 @@
   __ mov(edx, Operand(ebp, (argc + 2) * kPointerSize));  // receiver
 
   // Exit frame.
-  __ ExitInternalFrame();
+  __ LeaveInternalFrame();
 
   // Check that the function really is a function.
   __ test(edi, Immediate(kSmiTagMask));
@@ -703,7 +704,7 @@
   // Handle load cache miss.
   __ bind(&miss);
   Handle<Code> ic = ComputeCallMiss(argc);
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(INTERCEPTOR);
@@ -734,7 +735,7 @@
   __ bind(&miss);
   __ mov(Operand(ecx), Immediate(Handle<String>(name)));  // restore name
   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(transition == NULL ? FIELD : MAP_TRANSITION);
@@ -791,7 +792,7 @@
   __ bind(&miss);
   __ mov(Operand(ecx), Immediate(Handle<String>(name)));  // restore name
   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(CALLBACKS);
@@ -846,7 +847,7 @@
   __ bind(&miss);
   __ mov(Operand(ecx), Immediate(Handle<String>(name)));  // restore name
   Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(INTERCEPTOR);
@@ -884,7 +885,7 @@
   __ bind(&miss);
   __ DecrementCounter(&Counters::keyed_store_field, 1);
   Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
-  __ jmp(ic, code_target);
+  __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
   return GetCode(transition == NULL ? FIELD : MAP_TRANSITION);
diff --git a/src/third_party/jscre/pcre.h b/src/third_party/jscre/pcre.h
index 1deeb85..e11f2ed 100644
--- a/src/third_party/jscre/pcre.h
+++ b/src/third_party/jscre/pcre.h
@@ -52,6 +52,7 @@
 typedef uint16_t UChar;
 
 struct JSRegExp;
+typedef struct JSRegExp JscreRegExp;
 
 enum JSRegExpIgnoreCaseOption { JSRegExpDoNotIgnoreCase, JSRegExpIgnoreCase };
 enum JSRegExpMultilineOption { JSRegExpSingleLine, JSRegExpMultiline };
diff --git a/src/v8natives.js b/src/v8natives.js
index afd67fd..f65de4f 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -201,7 +201,7 @@
 %AddProperty(global, "eval", function(x) {
   if (!IS_STRING(x)) return x;
 
-  var f = %CompileString(x, true);
+  var f = %CompileString(x, 0, true);
   if (!IS_FUNCTION(f)) return f;
 
   return f.call(%EvalReceiver(this));
@@ -212,7 +212,7 @@
 %AddProperty(global, "execScript", function(expr, lang) {
   // NOTE: We don't care about the character casing.
   if (!lang || /javascript/i.test(lang)) {
-    var f = %CompileString(ToString(expr), false);
+    var f = %CompileString(ToString(expr), 0, false);
     f.call(global);
   }
   return null;
@@ -406,11 +406,13 @@
     if (p.indexOf(')') != -1) throw MakeSyntaxError('unable_to_parse',[]);
   }
   var body = (n > 0) ? ToString(%_Arguments(n - 1)) : '';
-  var source = '(function anonymous(' + p + ') { ' + body + ' })';
+  var source = '(function(' + p + ') {\n' + body + '\n})';
 
   // The call to SetNewFunctionAttributes will ensure the prototype
   // property of the resulting function is enumerable (ECMA262, 15.3.5.2).
-  return %SetNewFunctionAttributes(%CompileString(source, false)());
+  var f = %CompileString(source, -1, false)();
+  %FunctionSetName(f, "anonymous");
+  return %SetNewFunctionAttributes(f);
 };
 
 %SetCode($Function, NewFunction);
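Finally, on the v8natives.js change just above: the generated source now keeps the body on its own line inside the wrapper and attaches the name afterwards through %FunctionSetName. A small illustration of the two wrappings (plain C++ string building; the body is an arbitrary example and nothing is evaluated here):

    #include <cstdio>
    #include <string>

    int main() {
      std::string params = "a, b";
      // A body whose last line is a single-line comment.
      std::string body = "return a + b;  // sum";

      // Old wrapping: body and closing "})" share one line, so a trailing
      // line comment in the body swallows the closing "})".
      std::string old_source =
          "(function anonymous(" + params + ") { " + body + " })";

      // New wrapping: the body gets its own line, so the closing "})" stays
      // outside any line comment; the name is set separately afterwards.
      std::string new_source =
          "(function(" + params + ") {\n" + body + "\n})";

      std::printf("%s\n---\n%s\n", old_source.c_str(), new_source.c_str());
      return 0;
    }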