Version 3.10.3

Fixed several bugs in heap profiles (including issue 2078).

Throw syntax errors on illegal escape sequences.
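
A minimal JavaScript sketch of the kind of input this affects (assuming
ES5's escape grammar, where \x must be followed by exactly two hex digits;
the literal below is only an example):

  var s = "\x1";   // previously accepted leniently, now an early SyntaxError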

Implemented rudimentary module linking (behind --harmony flag).

Implemented ES5 erratum: Global declarations should shadow
inherited properties.
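
A hedged JavaScript illustration of the intended behaviour (run at global
scope; the property name is only an example):

  Object.prototype.foo = 1;  // the global object inherits 'foo'
  var foo;                   // should now create an own property on the global
                             // object, shadowing the inherited one, so 'foo'
                             // reads back as undefined rather than 1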

Made handling of const more consistent when combined with 'eval'
and 'with'.

Fixed V8 on MinGW-x64 (issue 2026).

Performance and stability improvements on all platforms.

Review URL: https://chromiumcodereview.appspot.com/10105026

git-svn-id: http://v8.googlecode.com/svn/trunk@11353 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/api.cc b/src/api.cc
index 067609a..c7252ba 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -5998,7 +5998,7 @@
 const HeapGraphNode* HeapGraphEdge::GetFromNode() const {
   i::Isolate* isolate = i::Isolate::Current();
   IsDeadCheck(isolate, "v8::HeapGraphEdge::GetFromNode");
-  const i::HeapEntry* from = ToInternal(this)->From();
+  const i::HeapEntry* from = ToInternal(this)->from();
   return reinterpret_cast<const HeapGraphNode*>(from);
 }
 
@@ -6218,6 +6218,14 @@
 }
 
 
+SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Value> value) {
+  i::Isolate* isolate = i::Isolate::Current();
+  IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshotObjectId");
+  i::Handle<i::Object> obj = Utils::OpenHandle(*value);
+  return i::HeapProfiler::GetSnapshotObjectId(obj);
+}
+
+
 const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
                                                HeapSnapshot::Type type,
                                                ActivityControl* control) {
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index f772db9..2f14d19 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -5873,36 +5873,12 @@
   // r2: result string length
   __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
   __ cmp(r2, Operand(r4, ASR, 1));
+  // Return original string.
   __ b(eq, &return_r0);
+  // Longer than original string's length or negative: unsafe arguments.
+  __ b(hi, &runtime);
+  // Shorter than original string's length: an actual substring.
 
-  Label result_longer_than_two;
-  // Check for special case of two character ASCII string, in which case
-  // we do a lookup in the symbol table first.
-  __ cmp(r2, Operand(2));
-  __ b(gt, &result_longer_than_two);
-  __ b(lt, &runtime);
-
-  __ JumpIfInstanceTypeIsNotSequentialAscii(r1, r1, &runtime);
-
-  // Get the two characters forming the sub string.
-  __ add(r0, r0, Operand(r3));
-  __ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
-  __ ldrb(r4, FieldMemOperand(r0, SeqAsciiString::kHeaderSize + 1));
-
-  // Try to lookup two character string in symbol table.
-  Label make_two_character_string;
-  StringHelper::GenerateTwoCharacterSymbolTableProbe(
-      masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
-  __ jmp(&return_r0);
-
-  // r2: result string length.
-  // r3: two characters combined into halfword in little endian byte order.
-  __ bind(&make_two_character_string);
-  __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
-  __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
-  __ jmp(&return_r0);
-
-  __ bind(&result_longer_than_two);
   // Deal with different string types: update the index if necessary
   // and put the underlying string into r5.
   // r0: original string
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index db95f78..e7555cb 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -266,11 +266,11 @@
       // For named function expressions, declare the function name as a
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
-        VariableProxy* proxy = scope()->function();
-        ASSERT(proxy->var()->mode() == CONST ||
-               proxy->var()->mode() == CONST_HARMONY);
-        ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
-        EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+        VariableDeclaration* function = scope()->function();
+        ASSERT(function->proxy()->var()->mode() == CONST ||
+               function->proxy()->var()->mode() == CONST_HARMONY);
+        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+        VisitVariableDeclaration(function);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -780,62 +780,51 @@
 }
 
 
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
-                                        VariableMode mode,
-                                        FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+  // The variable in the declaration always resides in the current function
+  // context.
+  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+  if (FLAG_debug_code) {
+    // Check that we're not inside a with or catch context.
+    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
+    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
+    __ Check(ne, "Declaration in with context.");
+    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
+    __ Check(ne, "Declaration in catch context.");
+  }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+    VariableDeclaration* declaration) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
+  VariableProxy* proxy = declaration->proxy();
+  VariableMode mode = declaration->mode();
   Variable* variable = proxy->var();
-  bool binding_needs_init = (function == NULL) &&
-      (mode == CONST || mode == CONST_HARMONY || mode == LET);
+  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++global_count_;
+      globals_->Add(variable->name());
+      globals_->Add(variable->binding_needs_init()
+                        ? isolate()->factory()->the_hole_value()
+                        : isolate()->factory()->undefined_value());
       break;
 
     case Variable::PARAMETER:
     case Variable::LOCAL:
-      if (function != NULL) {
-        Comment cmnt(masm_, "[ Declaration");
-        VisitForAccumulatorValue(function);
-        __ str(result_register(), StackOperand(variable));
-      } else if (binding_needs_init) {
-        Comment cmnt(masm_, "[ Declaration");
+      if (hole_init) {
+        Comment cmnt(masm_, "[ VariableDeclaration");
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, StackOperand(variable));
       }
       break;
 
     case Variable::CONTEXT:
-      // The variable in the decl always resides in the current function
-      // context.
-      ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-      if (FLAG_debug_code) {
-        // Check that we're not inside a with or catch context.
-        __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
-        __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
-        __ Check(ne, "Declaration in with context.");
-        __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
-        __ Check(ne, "Declaration in catch context.");
-      }
-      if (function != NULL) {
-        Comment cmnt(masm_, "[ Declaration");
-        VisitForAccumulatorValue(function);
-        __ str(result_register(), ContextOperand(cp, variable->index()));
-        int offset = Context::SlotOffset(variable->index());
-        // We know that we have written a function, which is not a smi.
-        __ RecordWriteContextSlot(cp,
-                                  offset,
-                                  result_register(),
-                                  r2,
-                                  kLRHasBeenSaved,
-                                  kDontSaveFPRegs,
-                                  EMIT_REMEMBERED_SET,
-                                  OMIT_SMI_CHECK);
-        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (binding_needs_init) {
-        Comment cmnt(masm_, "[ Declaration");
+      if (hole_init) {
+        Comment cmnt(masm_, "[ VariableDeclaration");
+        EmitDebugCheckDeclarationContext(variable);
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, ContextOperand(cp, variable->index()));
         // No write barrier since the_hole_value is in old space.
@@ -844,13 +833,11 @@
       break;
 
     case Variable::LOOKUP: {
-      Comment cmnt(masm_, "[ Declaration");
+      Comment cmnt(masm_, "[ VariableDeclaration");
       __ mov(r2, Operand(variable->name()));
       // Declaration nodes are always introduced in one of four modes.
-      ASSERT(mode == VAR ||
-             mode == CONST ||
-             mode == CONST_HARMONY ||
-             mode == LET);
+      ASSERT(mode == VAR || mode == LET ||
+             mode == CONST || mode == CONST_HARMONY);
       PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
           ? READ_ONLY : NONE;
       __ mov(r1, Operand(Smi::FromInt(attr)));
@@ -858,11 +845,7 @@
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
-      if (function != NULL) {
-        __ Push(cp, r2, r1);
-        // Push initial value for function declaration.
-        VisitForStackValue(function);
-      } else if (binding_needs_init) {
+      if (hole_init) {
         __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
         __ Push(cp, r2, r1, r0);
       } else {
@@ -876,6 +859,122 @@
 }
 
 
+void FullCodeGenerator::VisitFunctionDeclaration(
+    FunctionDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      globals_->Add(variable->name());
+      Handle<SharedFunctionInfo> function =
+          Compiler::BuildFunctionInfo(declaration->fun(), script());
+      // Check for stack-overflow exception.
+      if (function.is_null()) return SetStackOverflow();
+      globals_->Add(function);
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      VisitForAccumulatorValue(declaration->fun());
+      __ str(result_register(), StackOperand(variable));
+      break;
+    }
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      VisitForAccumulatorValue(declaration->fun());
+      __ str(result_register(), ContextOperand(cp, variable->index()));
+      int offset = Context::SlotOffset(variable->index());
+      // We know that we have written a function, which is not a smi.
+      __ RecordWriteContextSlot(cp,
+                                offset,
+                                result_register(),
+                                r2,
+                                kLRHasBeenSaved,
+                                kDontSaveFPRegs,
+                                EMIT_REMEMBERED_SET,
+                                OMIT_SMI_CHECK);
+      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+      break;
+    }
+
+    case Variable::LOOKUP: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      __ mov(r2, Operand(variable->name()));
+      __ mov(r1, Operand(Smi::FromInt(NONE)));
+      __ Push(cp, r2, r1);
+      // Push initial value for function declaration.
+      VisitForStackValue(declaration->fun());
+      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+      break;
+    }
+  }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  Handle<JSModule> instance = declaration->module()->interface()->Instance();
+  ASSERT(!instance.is_null());
+
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      Comment cmnt(masm_, "[ ModuleDeclaration");
+      globals_->Add(variable->name());
+      globals_->Add(instance);
+      Visit(declaration->module());
+      break;
+    }
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ ModuleDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      __ mov(r1, Operand(instance));
+      __ str(r1, ContextOperand(cp, variable->index()));
+      Visit(declaration->module());
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL:
+    case Variable::LOOKUP:
+      UNREACHABLE();
+  }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED:
+      // TODO(rossberg)
+      break;
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ ImportDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      // TODO(rossberg)
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL:
+    case Variable::LOOKUP:
+      UNREACHABLE();
+  }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+  // TODO(rossberg)
+}
+
+
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   // The context is the first argument.
@@ -4446,7 +4545,8 @@
 
 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   Scope* declaration_scope = scope()->DeclarationScope();
-  if (declaration_scope->is_global_scope()) {
+  if (declaration_scope->is_global_scope() ||
+      declaration_scope->is_module_scope()) {
     // Contexts nested in the global context have a canonical empty function
     // as their closure, not the anonymous closure containing the global
     // code.  Pass a smi sentinel and let the runtime look up the empty
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 147b02d..3b61334 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -4659,9 +4659,10 @@
         __ str(r2, FieldMemOperand(result, total_offset + 4));
       }
     } else if (elements->IsFixedArray()) {
+      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
       for (int i = 0; i < elements_length; i++) {
         int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
-        Handle<Object> value = JSObject::GetElement(object, i);
+        Handle<Object> value(fast_elements->get(i));
         if (value->IsJSObject()) {
           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
           __ add(r2, result, Operand(*offset));
diff --git a/src/array.js b/src/array.js
index daa75d5..d611837 100644
--- a/src/array.js
+++ b/src/array.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -465,15 +465,19 @@
 }
 
 
+// Returns an array containing the array elements of the object followed
+// by the array elements of each argument in order. See ECMA-262,
+// section 15.4.4.7.
 function ArrayConcat(arg1) {  // length == 1
   if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
     throw MakeTypeError("called_on_null_or_undefined",
                         ["Array.prototype.concat"]);
   }
 
+  var array = ToObject(this);
   var arg_count = %_ArgumentsLength();
   var arrays = new InternalArray(1 + arg_count);
-  arrays[0] = this;
+  arrays[0] = array;
   for (var i = 0; i < arg_count; i++) {
     arrays[i + 1] = %_Arguments(i);
   }
diff --git a/src/ast.cc b/src/ast.cc
index 4b6ae68..22645a8 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -993,138 +993,78 @@
 }
 
 
-#define INCREASE_NODE_COUNT(NodeType) \
+#define REGULAR_NODE(NodeType) \
   void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
     increase_node_count(); \
   }
+#define DONT_OPTIMIZE_NODE(NodeType) \
+  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+    increase_node_count(); \
+    add_flag(kDontOptimize); \
+    add_flag(kDontInline); \
+    add_flag(kDontSelfOptimize); \
+  }
+#define DONT_INLINE_NODE(NodeType) \
+  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+    increase_node_count(); \
+    add_flag(kDontInline); \
+  }
+#define DONT_SELFOPTIMIZE_NODE(NodeType) \
+  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+    increase_node_count(); \
+    add_flag(kDontSelfOptimize); \
+  }
 
-INCREASE_NODE_COUNT(VariableDeclaration)
-INCREASE_NODE_COUNT(FunctionDeclaration)
-INCREASE_NODE_COUNT(ModuleDeclaration)
-INCREASE_NODE_COUNT(ImportDeclaration)
-INCREASE_NODE_COUNT(ExportDeclaration)
-INCREASE_NODE_COUNT(ModuleLiteral)
-INCREASE_NODE_COUNT(ModuleVariable)
-INCREASE_NODE_COUNT(ModulePath)
-INCREASE_NODE_COUNT(ModuleUrl)
-INCREASE_NODE_COUNT(Block)
-INCREASE_NODE_COUNT(ExpressionStatement)
-INCREASE_NODE_COUNT(EmptyStatement)
-INCREASE_NODE_COUNT(IfStatement)
-INCREASE_NODE_COUNT(ContinueStatement)
-INCREASE_NODE_COUNT(BreakStatement)
-INCREASE_NODE_COUNT(ReturnStatement)
-INCREASE_NODE_COUNT(Conditional)
-INCREASE_NODE_COUNT(Literal)
-INCREASE_NODE_COUNT(ObjectLiteral)
-INCREASE_NODE_COUNT(Assignment)
-INCREASE_NODE_COUNT(Throw)
-INCREASE_NODE_COUNT(Property)
-INCREASE_NODE_COUNT(UnaryOperation)
-INCREASE_NODE_COUNT(CountOperation)
-INCREASE_NODE_COUNT(BinaryOperation)
-INCREASE_NODE_COUNT(CompareOperation)
-INCREASE_NODE_COUNT(ThisFunction)
-INCREASE_NODE_COUNT(Call)
-INCREASE_NODE_COUNT(CallNew)
+REGULAR_NODE(VariableDeclaration)
+REGULAR_NODE(FunctionDeclaration)
+REGULAR_NODE(Block)
+REGULAR_NODE(ExpressionStatement)
+REGULAR_NODE(EmptyStatement)
+REGULAR_NODE(IfStatement)
+REGULAR_NODE(ContinueStatement)
+REGULAR_NODE(BreakStatement)
+REGULAR_NODE(ReturnStatement)
+REGULAR_NODE(Conditional)
+REGULAR_NODE(Literal)
+REGULAR_NODE(ObjectLiteral)
+REGULAR_NODE(Assignment)
+REGULAR_NODE(Throw)
+REGULAR_NODE(Property)
+REGULAR_NODE(UnaryOperation)
+REGULAR_NODE(CountOperation)
+REGULAR_NODE(BinaryOperation)
+REGULAR_NODE(CompareOperation)
+REGULAR_NODE(ThisFunction)
+REGULAR_NODE(Call)
+REGULAR_NODE(CallNew)
+// In theory, for VariableProxy we'd have to add:
+// if (node->var()->IsLookupSlot()) add_flag(kDontInline);
+// But node->var() is usually not bound yet at VariableProxy creation time, and
+// LOOKUP variables only result from constructs that cannot be inlined anyway.
+REGULAR_NODE(VariableProxy)
 
-#undef INCREASE_NODE_COUNT
+DONT_OPTIMIZE_NODE(ModuleDeclaration)
+DONT_OPTIMIZE_NODE(ImportDeclaration)
+DONT_OPTIMIZE_NODE(ExportDeclaration)
+DONT_OPTIMIZE_NODE(ModuleLiteral)
+DONT_OPTIMIZE_NODE(ModuleVariable)
+DONT_OPTIMIZE_NODE(ModulePath)
+DONT_OPTIMIZE_NODE(ModuleUrl)
+DONT_OPTIMIZE_NODE(WithStatement)
+DONT_OPTIMIZE_NODE(TryCatchStatement)
+DONT_OPTIMIZE_NODE(TryFinallyStatement)
+DONT_OPTIMIZE_NODE(DebuggerStatement)
+DONT_OPTIMIZE_NODE(SharedFunctionInfoLiteral)
 
+DONT_INLINE_NODE(SwitchStatement)
+DONT_INLINE_NODE(FunctionLiteral)
+DONT_INLINE_NODE(RegExpLiteral)  // TODO(1322): Allow materialized literals.
+DONT_INLINE_NODE(ArrayLiteral)  // TODO(1322): Allow materialized literals.
 
-void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
-  increase_node_count();
-  add_flag(kDontOptimize);
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
-  increase_node_count();
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
-  increase_node_count();
-  add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
-  increase_node_count();
-  add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
-  increase_node_count();
-  add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
-  increase_node_count();
-  add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
-  increase_node_count();
-  add_flag(kDontOptimize);
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitTryFinallyStatement(
-    TryFinallyStatement* node) {
-  increase_node_count();
-  add_flag(kDontOptimize);
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
-  increase_node_count();
-  add_flag(kDontOptimize);
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
-  increase_node_count();
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* node) {
-  increase_node_count();
-  add_flag(kDontOptimize);
-  add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
-  increase_node_count();
-  // In theory, we'd have to add:
-  // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
-  // However, node->var() is usually not bound yet at VariableProxy creation
-  // time, and LOOKUP variables only result from constructs that cannot
-  // be inlined anyway.
-}
-
-
-void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
-  increase_node_count();
-  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
-}
-
-
-void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
-  increase_node_count();
-  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
-}
-
+DONT_SELFOPTIMIZE_NODE(DoWhileStatement)
+DONT_SELFOPTIMIZE_NODE(WhileStatement)
+DONT_SELFOPTIMIZE_NODE(ForStatement)
+DONT_SELFOPTIMIZE_NODE(ForInStatement)
 
 void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
   increase_node_count();
@@ -1142,6 +1082,11 @@
   }
 }
 
+#undef REGULAR_NODE
+#undef DONT_OPTIMIZE_NODE
+#undef DONT_INLINE_NODE
+#undef DONT_SELFOPTIMIZE_NODE
+
 
 Handle<String> Literal::ToString() {
   if (handle_->IsString()) return Handle<String>::cast(handle_);
diff --git a/src/ast.h b/src/ast.h
index d6c47e2..dad8057 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -421,8 +421,8 @@
   ZoneList<Statement*>* statements() { return &statements_; }
   bool is_initializer_block() const { return is_initializer_block_; }
 
-  Scope* block_scope() const { return block_scope_; }
-  void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; }
+  Scope* scope() const { return scope_; }
+  void set_scope(Scope* scope) { scope_ = scope; }
 
  protected:
   template<class> friend class AstNodeFactory;
@@ -434,13 +434,13 @@
       : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
         statements_(capacity),
         is_initializer_block_(is_initializer_block),
-        block_scope_(NULL) {
+        scope_(NULL) {
   }
 
  private:
   ZoneList<Statement*> statements_;
   bool is_initializer_block_;
-  Scope* block_scope_;
+  Scope* scope_;
 };
 
 
@@ -608,6 +608,7 @@
   DECLARE_NODE_TYPE(ModuleLiteral)
 
   Block* body() const { return body_; }
+  Handle<Context> context() const { return context_; }
 
  protected:
   template<class> friend class AstNodeFactory;
@@ -619,6 +620,7 @@
 
  private:
   Block* body_;
+  Handle<Context> context_;
 };
 
 
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 0e95b4b..6178815 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -2159,7 +2159,7 @@
     Handle<DescriptorArray> descs =
         Handle<DescriptorArray>(from->map()->instance_descriptors());
     for (int i = 0; i < descs->number_of_descriptors(); i++) {
-      PropertyDetails details = PropertyDetails(descs->GetDetails(i));
+      PropertyDetails details = descs->GetDetails(i);
       switch (details.type()) {
         case FIELD: {
           HandleScope inner;
diff --git a/src/contexts.h b/src/contexts.h
index af5cb03..647c15c 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -397,7 +397,7 @@
   GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSORS)
 #undef GLOBAL_CONTEXT_FIELD_ACCESSORS
 
-  // Lookup the the slot called name, starting with the current context.
+  // Lookup the slot called name, starting with the current context.
   // There are three possibilities:
   //
   // 1) result->IsContext():
diff --git a/src/factory.cc b/src/factory.cc
index e8a9f26..6bb7893 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -291,6 +291,15 @@
 }
 
 
+Handle<Context> Factory::NewModuleContext(Handle<Context> previous,
+                                          Handle<ScopeInfo> scope_info) {
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateModuleContext(*previous, *scope_info),
+      Context);
+}
+
+
 Handle<Context> Factory::NewFunctionContext(int length,
                                             Handle<JSFunction> function) {
   CALL_HEAP_FUNCTION(
@@ -324,10 +333,9 @@
 }
 
 
-Handle<Context> Factory::NewBlockContext(
-    Handle<JSFunction> function,
-    Handle<Context> previous,
-    Handle<ScopeInfo> scope_info) {
+Handle<Context> Factory::NewBlockContext(Handle<JSFunction> function,
+                                         Handle<Context> previous,
+                                         Handle<ScopeInfo> scope_info) {
   CALL_HEAP_FUNCTION(
       isolate(),
       isolate()->heap()->AllocateBlockContext(*function,
@@ -928,6 +936,13 @@
 }
 
 
+Handle<JSModule> Factory::NewJSModule() {
+  CALL_HEAP_FUNCTION(
+      isolate(),
+      isolate()->heap()->AllocateJSModule(), JSModule);
+}
+
+
 Handle<GlobalObject> Factory::NewGlobalObject(
     Handle<JSFunction> constructor) {
   CALL_HEAP_FUNCTION(isolate(),
diff --git a/src/factory.h b/src/factory.h
index 786d4a9..06aad1b 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -162,9 +162,12 @@
   // Create a global (but otherwise uninitialized) context.
   Handle<Context> NewGlobalContext();
 
+  // Create a module context.
+  Handle<Context> NewModuleContext(Handle<Context> previous,
+                                   Handle<ScopeInfo> scope_info);
+
   // Create a function context.
-  Handle<Context> NewFunctionContext(int length,
-                                     Handle<JSFunction> function);
+  Handle<Context> NewFunctionContext(int length, Handle<JSFunction> function);
 
   // Create a catch context.
   Handle<Context> NewCatchContext(Handle<JSFunction> function,
@@ -177,7 +180,7 @@
                                  Handle<Context> previous,
                                  Handle<JSObject> extension);
 
-  // Create a 'block' context.
+  // Create a block context.
   Handle<Context> NewBlockContext(Handle<JSFunction> function,
                                   Handle<Context> previous,
                                   Handle<ScopeInfo> scope_info);
@@ -262,6 +265,9 @@
   // runtime.
   Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
 
+  // JS modules are pretenured.
+  Handle<JSModule> NewJSModule();
+
   // JS arrays are pretenured when allocated by the parser.
   Handle<JSArray> NewJSArray(int capacity,
                              ElementsKind elements_kind = FAST_ELEMENTS,
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 449c5d2..b8794c0 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -568,88 +568,91 @@
 
 void FullCodeGenerator::VisitDeclarations(
     ZoneList<Declaration*>* declarations) {
-  int save_global_count = global_count_;
-  global_count_ = 0;
+  ZoneList<Handle<Object> >* saved_globals = globals_;
+  ZoneList<Handle<Object> > inner_globals(10);
+  globals_ = &inner_globals;
 
   AstVisitor::VisitDeclarations(declarations);
-
-  // Batch declare global functions and variables.
-  if (global_count_ > 0) {
-    Handle<FixedArray> array =
-       isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
-    int length = declarations->length();
-    for (int j = 0, i = 0; i < length; i++) {
-      Declaration* decl = declarations->at(i);
-      Variable* var = decl->proxy()->var();
-
-      if (var->IsUnallocated()) {
-        array->set(j++, *(var->name()));
-        FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
-        if (fun_decl == NULL) {
-          if (var->binding_needs_init()) {
-            // In case this binding needs initialization use the hole.
-            array->set_the_hole(j++);
-          } else {
-            array->set_undefined(j++);
-          }
-        } else {
-          Handle<SharedFunctionInfo> function =
-              Compiler::BuildFunctionInfo(fun_decl->fun(), script());
-          // Check for stack-overflow exception.
-          if (function.is_null()) {
-            SetStackOverflow();
-            return;
-          }
-          array->set(j++, *function);
-        }
-      }
-    }
+  if (!globals_->is_empty()) {
     // Invoke the platform-dependent code generator to do the actual
     // declaration the global functions and variables.
+    Handle<FixedArray> array =
+       isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
+    for (int i = 0; i < globals_->length(); ++i)
+      array->set(i, *globals_->at(i));
     DeclareGlobals(array);
   }
 
-  global_count_ = save_global_count;
-}
-
-
-void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
-  EmitDeclaration(decl->proxy(), decl->mode(), NULL);
-}
-
-
-void FullCodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
-  EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
-}
-
-
-void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
-  EmitDeclaration(decl->proxy(), decl->mode(), NULL);
-}
-
-
-void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* decl) {
-  EmitDeclaration(decl->proxy(), decl->mode(), NULL);
-}
-
-
-void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* decl) {
-  // TODO(rossberg)
+  globals_ = saved_globals;
 }
 
 
 void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
-  // TODO(rossberg)
+  Handle<JSModule> instance = module->interface()->Instance();
+  ASSERT(!instance.is_null());
+
+  // Allocate a module context statically.
+  Block* block = module->body();
+  Scope* saved_scope = scope();
+  scope_ = block->scope();
+  Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
+
+  // Generate code for module creation and linking.
+  Comment cmnt(masm_, "[ ModuleLiteral");
+  SetStatementPosition(block);
+
+  if (scope_info->HasContext()) {
+    // Set up module context.
+    __ Push(scope_info);
+    __ Push(instance);
+    __ CallRuntime(Runtime::kPushModuleContext, 2);
+    StoreToFrameField(
+        StandardFrameConstants::kContextOffset, context_register());
+  }
+
+  {
+    Comment cmnt(masm_, "[ Declarations");
+    VisitDeclarations(scope_->declarations());
+  }
+
+  scope_ = saved_scope;
+  if (scope_info->HasContext()) {
+    // Pop module context.
+    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
+    // Update local stack frame context field.
+    StoreToFrameField(
+        StandardFrameConstants::kContextOffset, context_register());
+  }
+
+  // Populate module instance object.
+  const PropertyAttributes attr =
+      static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE | DONT_ENUM);
+  for (Interface::Iterator it = module->interface()->iterator();
+       !it.done(); it.Advance()) {
+    if (it.interface()->IsModule()) {
+      Handle<Object> value = it.interface()->Instance();
+      ASSERT(!value.is_null());
+      JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
+    } else {
+      // TODO(rossberg): set proper getters instead of undefined...
+      // instance->DefineAccessor(*it.name(), ACCESSOR_GETTER, *getter, attr);
+      Handle<Object> value(isolate()->heap()->undefined_value());
+      JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
+    }
+  }
+  USE(instance->PreventExtensions());
 }
 
 
 void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
-  // TODO(rossberg)
+  // Nothing to do.
+  // The instance object is resolved statically through the module's interface.
 }
 
 
 void FullCodeGenerator::VisitModulePath(ModulePath* module) {
-  // TODO(rossberg)
+  // Nothing to do.
+  // The instance object is resolved statically through the module's interface.
 }
 
 
@@ -911,9 +914,9 @@
 
   Scope* saved_scope = scope();
   // Push a block context when entering a block with block scoped variables.
-  if (stmt->block_scope() != NULL) {
+  if (stmt->scope() != NULL) {
     { Comment cmnt(masm_, "[ Extend block context");
-      scope_ = stmt->block_scope();
+      scope_ = stmt->scope();
       Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
       int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
       __ Push(scope_info);
@@ -940,7 +943,7 @@
   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
 
   // Pop block context if necessary.
-  if (stmt->block_scope() != NULL) {
+  if (stmt->scope() != NULL) {
     LoadContextField(context_register(), Context::PREVIOUS_INDEX);
     // Update local stack frame context field.
     StoreToFrameField(StandardFrameConstants::kContextOffset,
diff --git a/src/full-codegen.h b/src/full-codegen.h
index a308d83..0e0ffe9 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -83,7 +83,7 @@
         scope_(info->scope()),
         nesting_stack_(NULL),
         loop_depth_(0),
-        global_count_(0),
+        globals_(NULL),
         context_(NULL),
         bailout_entries_(info->HasDeoptimizationSupport()
                          ? info->function()->ast_node_count() : 0),
@@ -202,7 +202,7 @@
     virtual ~NestedBlock() {}
 
     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
-      if (statement()->AsBlock()->block_scope() != NULL) {
+      if (statement()->AsBlock()->scope() != NULL) {
         ++(*context_length);
       }
       return previous_;
@@ -413,12 +413,9 @@
                                     Label* if_true,
                                     Label* if_false);
 
-  // Platform-specific code for a variable, constant, or function
-  // declaration.  Functions have an initial value.
-  // Increments global_count_ for unallocated variables.
-  void EmitDeclaration(VariableProxy* proxy,
-                       VariableMode mode,
-                       FunctionLiteral* function);
+  // If enabled, emit debug code for checking that the current context is
+  // neither a with nor a catch context.
+  void EmitDebugCheckDeclarationContext(Variable* variable);
 
   // Platform-specific code for checking the stack limit at the back edge of
   // a loop.
@@ -548,12 +545,8 @@
   Handle<Script> script() { return info_->script(); }
   bool is_eval() { return info_->is_eval(); }
   bool is_native() { return info_->is_native(); }
-  bool is_classic_mode() {
-    return language_mode() == CLASSIC_MODE;
-  }
-  LanguageMode language_mode() {
-    return function()->language_mode();
-  }
+  bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
+  LanguageMode language_mode() { return function()->language_mode(); }
   FunctionLiteral* function() { return info_->function(); }
   Scope* scope() { return scope_; }
 
@@ -785,7 +778,7 @@
   Label return_label_;
   NestedStatement* nesting_stack_;
   int loop_depth_;
-  int global_count_;
+  ZoneList<Handle<Object> >* globals_;
   const ExpressionContext* context_;
   ZoneList<BailoutEntry> bailout_entries_;
   ZoneList<BailoutEntry> stack_checks_;
diff --git a/src/handles.cc b/src/handles.cc
index 416ecbd..def1604 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -729,9 +729,9 @@
         Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
 
     for (int i = 0; i < descs->number_of_descriptors(); i++) {
-      if (descs->IsProperty(i) && !descs->IsDontEnum(i)) {
+      if (descs->IsProperty(i) && !descs->GetDetails(i).IsDontEnum()) {
         storage->set(index, descs->GetKey(i));
-        PropertyDetails details(descs->GetDetails(i));
+        PropertyDetails details = descs->GetDetails(i);
         sort_array->set(index, Smi::FromInt(details.index()));
         if (!indices.is_null()) {
           if (details.type() != FIELD) {
diff --git a/src/heap-profiler.cc b/src/heap-profiler.cc
index cda1a87..2e971a5 100644
--- a/src/heap-profiler.cc
+++ b/src/heap-profiler.cc
@@ -189,6 +189,15 @@
 }
 
 
+SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
+  if (!obj->IsHeapObject())
+    return v8::HeapProfiler::kUnknownObjectId;
+  HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+  ASSERT(profiler != NULL);
+  return profiler->snapshots_->FindObjectId(HeapObject::cast(*obj)->address());
+}
+
+
 void HeapProfiler::DeleteAllSnapshots() {
   HeapProfiler* profiler = Isolate::Current()->heap_profiler();
   ASSERT(profiler != NULL);
diff --git a/src/heap-profiler.h b/src/heap-profiler.h
index 2308674..96b042d 100644
--- a/src/heap-profiler.h
+++ b/src/heap-profiler.h
@@ -62,6 +62,7 @@
   static int GetSnapshotsCount();
   static HeapSnapshot* GetSnapshot(int index);
   static HeapSnapshot* FindSnapshot(unsigned uid);
+  static SnapshotObjectId GetSnapshotObjectId(Handle<Object> obj);
   static void DeleteAllSnapshots();
 
   void ObjectMoveEvent(Address from, Address to);
diff --git a/src/heap.cc b/src/heap.cc
index 22dbbeb..9081017 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1124,6 +1124,27 @@
 }
 
 
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+  virtual Object* RetainAs(Object* object) {
+    if (!heap_->InFromSpace(object)) {
+      return object;
+    }
+
+    MapWord map_word = HeapObject::cast(object)->map_word();
+    if (map_word.IsForwardingAddress()) {
+      return map_word.ToForwardingAddress();
+    }
+    return NULL;
+  }
+
+ private:
+  Heap* heap_;
+};
+
+
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
@@ -1222,6 +1243,9 @@
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
 
+  ScavengeWeakObjectRetainer weak_object_retainer(this);
+  ProcessWeakReferences(&weak_object_retainer);
+
   ASSERT(new_space_front == new_space_.top());
 
   // Set age mark.
@@ -1308,7 +1332,8 @@
 
 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
-                                             WeakObjectRetainer* retainer) {
+                                             WeakObjectRetainer* retainer,
+                                             bool record_slots) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   JSFunction* tail = NULL;
@@ -1325,6 +1350,12 @@
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
         tail->set_next_function_link(retain);
+        if (record_slots) {
+          Object** next_function =
+              HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+          heap->mark_compact_collector()->RecordSlot(
+              next_function, next_function, retain);
+        }
       }
       // Retained function is new tail.
       candidate_function = reinterpret_cast<JSFunction*>(retain);
@@ -1353,6 +1384,15 @@
   Object* head = undefined;
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges.
+  // Instead we do it once when we complete a mark-compact cycle.
+  // Note that the write barrier has no effect if we are already in the middle
+  // of a compacting mark-sweep cycle, so we have to record slots manually.
+  bool record_slots =
+      gc_state() == MARK_COMPACT &&
+      mark_compact_collector()->is_compacting();
+
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
@@ -1368,6 +1408,14 @@
                             Context::NEXT_CONTEXT_LINK,
                             retain,
                             UPDATE_WRITE_BARRIER);
+
+        if (record_slots) {
+          Object** next_context =
+              HeapObject::RawField(
+                  tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+          mark_compact_collector()->RecordSlot(
+              next_context, next_context, retain);
+        }
       }
       // Retained context is new tail.
       candidate_context = reinterpret_cast<Context*>(retain);
@@ -1380,11 +1428,19 @@
           ProcessFunctionWeakReferences(
               this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
-              retainer);
+              retainer,
+              record_slots);
       candidate_context->set_unchecked(this,
                                        Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
+      if (record_slots) {
+        Object** optimized_functions =
+            HeapObject::RawField(
+                tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+        mark_compact_collector()->RecordSlot(
+            optimized_functions, optimized_functions, function_list_head);
+      }
     }
 
     // Move to next element in the list.
@@ -1484,6 +1540,27 @@
 }
 
 
+STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+
+
+INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                              HeapObject* object,
+                                              int size));
+
+static HeapObject* EnsureDoubleAligned(Heap* heap,
+                                       HeapObject* object,
+                                       int size) {
+  if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
+    heap->CreateFillerObjectAt(object->address(), kPointerSize);
+    return HeapObject::FromAddress(object->address() + kPointerSize);
+  } else {
+    heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
+                               kPointerSize);
+    return object;
+  }
+}
+
+
 enum LoggingAndProfiling {
   LOGGING_AND_PROFILING_ENABLED,
   LOGGING_AND_PROFILING_DISABLED
@@ -1607,7 +1684,10 @@
     }
   }
 
-  template<ObjectContents object_contents, SizeRestriction size_restriction>
+
+  template<ObjectContents object_contents,
+           SizeRestriction size_restriction,
+           int alignment>
   static inline void EvacuateObject(Map* map,
                                     HeapObject** slot,
                                     HeapObject* object,
@@ -1616,19 +1696,26 @@
                 (object_size <= Page::kMaxNonCodeHeapObjectSize));
     SLOW_ASSERT(object->Size() == object_size);
 
+    int allocation_size = object_size;
+    if (alignment != kObjectAlignment) {
+      ASSERT(alignment == kDoubleAlignment);
+      allocation_size += kPointerSize;
+    }
+
     Heap* heap = map->GetHeap();
     if (heap->ShouldBePromoted(object->address(), object_size)) {
       MaybeObject* maybe_result;
 
       if ((size_restriction != SMALL) &&
-          (object_size > Page::kMaxNonCodeHeapObjectSize)) {
-        maybe_result = heap->lo_space()->AllocateRaw(object_size,
+          (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
+        maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
                                                      NOT_EXECUTABLE);
       } else {
         if (object_contents == DATA_OBJECT) {
-          maybe_result = heap->old_data_space()->AllocateRaw(object_size);
+          maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
         } else {
-          maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
+          maybe_result =
+              heap->old_pointer_space()->AllocateRaw(allocation_size);
         }
       }
 
@@ -1636,6 +1723,10 @@
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
 
+        if (alignment != kObjectAlignment) {
+          target = EnsureDoubleAligned(heap, target, allocation_size);
+        }
+
         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
@@ -1643,18 +1734,27 @@
         MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          heap->promotion_queue()->insert(target, object_size);
+          if (map->instance_type() == JS_FUNCTION_TYPE) {
+            heap->promotion_queue()->insert(
+                target, JSFunction::kNonWeakFieldsEndOffset);
+          } else {
+            heap->promotion_queue()->insert(target, object_size);
+          }
         }
 
         heap->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
-    MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
+    MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
     HeapObject* target = HeapObject::cast(result);
 
+    if (alignment != kObjectAlignment) {
+      target = EnsureDoubleAligned(heap, target, allocation_size);
+    }
+
     // Order is important: slot might be inside of the target if target
     // was allocated over a dead object and slot comes from the store
     // buffer.
@@ -1690,7 +1790,7 @@
                                         HeapObject** slot,
                                         HeapObject* object) {
     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+    EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
                                                  slot,
                                                  object,
                                                  object_size);
@@ -1702,10 +1802,11 @@
                                               HeapObject* object) {
     int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
     int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
-                                              slot,
-                                              object,
-                                              object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
+        map,
+        slot,
+        object,
+        object_size);
   }
 
 
@@ -1713,7 +1814,8 @@
                                        HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
@@ -1722,7 +1824,8 @@
                                             HeapObject* object) {
     int object_size = SeqAsciiString::cast(object)->
         SeqAsciiStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
@@ -1731,7 +1834,8 @@
                                               HeapObject* object) {
     int object_size = SeqTwoByteString::cast(object)->
         SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+    EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
 
@@ -1774,7 +1878,8 @@
     }
 
     int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+    EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
+        map, slot, object, object_size);
   }
 
   template<ObjectContents object_contents>
@@ -1784,14 +1889,16 @@
     static inline void VisitSpecialized(Map* map,
                                         HeapObject** slot,
                                         HeapObject* object) {
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+          map, slot, object, object_size);
     }
 
     static inline void Visit(Map* map,
                              HeapObject** slot,
                              HeapObject* object) {
       int object_size = map->instance_size();
-      EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+      EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+          map, slot, object, object_size);
     }
   };
 
@@ -3827,6 +3934,16 @@
 }
 
 
+MaybeObject* Heap::AllocateJSModule() {
+  // Allocate a fresh map. Modules do not have a prototype.
+  Map* map;
+  MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
+  if (!maybe_map->To(&map)) return maybe_map;
+  // Allocate the object based on the map.
+  return AllocateJSObjectFromMap(map, TENURED);
+}
+
+
 MaybeObject* Heap::AllocateJSArrayAndStorage(
     ElementsKind elements_kind,
     int length,
@@ -3963,7 +4080,7 @@
   // Fill these accessors into the dictionary.
   DescriptorArray* descs = map->instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
-    PropertyDetails details(descs->GetDetails(i));
+    PropertyDetails details = descs->GetDetails(i);
     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
     PropertyDetails d =
         PropertyDetails(details.attributes(), CALLBACKS, details.index());
@@ -4656,6 +4773,11 @@
   AllocationSpace space =
       (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   int size = FixedDoubleArray::SizeFor(length);
+
+#ifndef V8_HOST_ARCH_64_BIT
+  size += kPointerSize;
+#endif
+
   if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
     // Too big for new space.
     space = LO_SPACE;
@@ -4668,7 +4790,12 @@
   AllocationSpace retry_space =
       (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
 
-  return AllocateRaw(size, space, retry_space);
+  HeapObject* object;
+  { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+    if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
+  }
+
+  return EnsureDoubleAligned(this, object, size);
 }
 
 
@@ -4701,6 +4828,22 @@
 }
 
 
+MaybeObject* Heap::AllocateModuleContext(Context* previous,
+                                         ScopeInfo* scope_info) {
+  Object* result;
+  { MaybeObject* maybe_result =
+        AllocateFixedArrayWithHoles(scope_info->ContextLength(), TENURED);
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+  }
+  Context* context = reinterpret_cast<Context*>(result);
+  context->set_map_no_write_barrier(module_context_map());
+  context->set_previous(previous);
+  context->set_extension(scope_info);
+  context->set_global(previous->global());
+  return context;
+}
+
+
 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
   ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
   Object* result;
diff --git a/src/heap.h b/src/heap.h
index d1cdda0..af86f44 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -529,6 +529,8 @@
   MUST_USE_RESULT MaybeObject* AllocateJSObject(
       JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
 
+  MUST_USE_RESULT MaybeObject* AllocateJSModule();
+
   // Allocate a JSArray with no elements
   MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
       ElementsKind elements_kind,
@@ -820,6 +822,10 @@
   // Allocate a global (but otherwise uninitialized) context.
   MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
 
+  // Allocate a module context.
+  MUST_USE_RESULT MaybeObject* AllocateModuleContext(Context* previous,
+                                                     ScopeInfo* scope_info);
+
   // Allocate a function context.
   MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
                                                        JSFunction* function);
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index b55d40e..2c13b88 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -2272,6 +2272,13 @@
 }
 
 
+void HBitwise::PrintDataTo(StringStream* stream) {
+  stream->Add(Token::Name(op_));
+  stream->Add(" ");
+  HBitwiseBinaryOperation::PrintDataTo(stream);
+}
+
+
 Representation HPhi::InferredRepresentation() {
   bool double_occurred = false;
   bool int32_occurred = false;
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 1754ad4..ba8d2e7 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -3353,6 +3353,8 @@
                                    HValue* left,
                                    HValue* right);
 
+  virtual void PrintDataTo(StringStream* stream);
+
   DECLARE_CONCRETE_INSTRUCTION(Bitwise)
 
  protected:
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 6ce1665..1fac70e 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -612,6 +612,7 @@
       graph_(NULL),
       current_block_(NULL),
       inlined_count_(0),
+      globals_(10),
       zone_(info->isolate()->zone()),
       inline_bailout_(false) {
   // This is not initialized in the initializer list because the
@@ -2550,7 +2551,7 @@
     // Handle implicit declaration of the function name in named function
     // expressions before other declarations.
     if (scope->is_function_scope() && scope->function() != NULL) {
-      HandleDeclaration(scope->function(), CONST, NULL, NULL);
+      VisitVariableDeclaration(scope->function());
     }
     VisitDeclarations(scope->declarations());
     AddSimulate(AstNode::kDeclarationsId);
@@ -2767,7 +2768,7 @@
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  if (stmt->block_scope() != NULL) {
+  if (stmt->scope() != NULL) {
     return Bailout("ScopedBlock");
   }
   BreakAndContinueInfo break_info(stmt);
@@ -3748,10 +3749,11 @@
     if (boilerplate->HasFastDoubleElements()) {
       *total_size += FixedDoubleArray::SizeFor(elements->length());
     } else if (boilerplate->HasFastElements()) {
+      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
       int length = elements->length();
       for (int i = 0; i < length; i++) {
         if ((*max_properties)-- == 0) return false;
-        Handle<Object> value = JSObject::GetElement(boilerplate, i);
+        Handle<Object> value(fast_elements->get(i));
         if (value->IsJSObject()) {
           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
           if (!IsFastLiteral(value_object,
@@ -7160,90 +7162,50 @@
 
 
 void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
-  int length = declarations->length();
-  int global_count = 0;
-  for (int i = 0; i < declarations->length(); i++) {
-    Declaration* decl = declarations->at(i);
-    FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
-    HandleDeclaration(decl->proxy(),
-                      decl->mode(),
-                      fun_decl != NULL ? fun_decl->fun() : NULL,
-                      &global_count);
-  }
-
-  // Batch declare global functions and variables.
-  if (global_count > 0) {
+  ASSERT(globals_.is_empty());
+  AstVisitor::VisitDeclarations(declarations);
+  if (!globals_.is_empty()) {
     Handle<FixedArray> array =
-        isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
-    for (int j = 0, i = 0; i < length; i++) {
-      Declaration* decl = declarations->at(i);
-      Variable* var = decl->proxy()->var();
-
-      if (var->IsUnallocated()) {
-        array->set(j++, *(var->name()));
-        FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
-        if (fun_decl == NULL) {
-          if (var->binding_needs_init()) {
-            // In case this binding needs initialization use the hole.
-            array->set_the_hole(j++);
-          } else {
-            array->set_undefined(j++);
-          }
-        } else {
-          Handle<SharedFunctionInfo> function =
-              Compiler::BuildFunctionInfo(fun_decl->fun(), info()->script());
-          // Check for stack-overflow exception.
-          if (function.is_null()) {
-            SetStackOverflow();
-            return;
-          }
-          array->set(j++, *function);
-        }
-      }
-    }
+       isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
+    for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
     int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
                 DeclareGlobalsNativeFlag::encode(info()->is_native()) |
                 DeclareGlobalsLanguageMode::encode(info()->language_mode());
-    HInstruction* result =
-        new(zone()) HDeclareGlobals(environment()->LookupContext(),
-                                    array,
-                                    flags);
+    HInstruction* result = new(zone()) HDeclareGlobals(
+        environment()->LookupContext(), array, flags);
     AddInstruction(result);
+    globals_.Clear();
   }
 }
 
 
-void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
-                                      VariableMode mode,
-                                      FunctionLiteral* function,
-                                      int* global_count) {
-  Variable* var = proxy->var();
-  bool binding_needs_init =
-      (mode == CONST || mode == CONST_HARMONY || mode == LET);
-  switch (var->location()) {
+void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  VariableMode mode = declaration->mode();
+  Variable* variable = proxy->var();
+  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
+  switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++(*global_count);
+      globals_.Add(variable->name());
+      globals_.Add(variable->binding_needs_init()
+                       ? isolate()->factory()->the_hole_value()
+                       : isolate()->factory()->undefined_value());
       return;
     case Variable::PARAMETER:
     case Variable::LOCAL:
+      if (hole_init) {
+        HValue* value = graph()->GetConstantHole();
+        environment()->Bind(variable, value);
+      }
+      break;
     case Variable::CONTEXT:
-      if (binding_needs_init || function != NULL) {
-        HValue* value = NULL;
-        if (function != NULL) {
-          CHECK_ALIVE(VisitForValue(function));
-          value = Pop();
-        } else {
-          value = graph()->GetConstantHole();
-        }
-        if (var->IsContextSlot()) {
-          HValue* context = environment()->LookupContext();
-          HStoreContextSlot* store = new HStoreContextSlot(
-              context, var->index(), HStoreContextSlot::kNoCheck, value);
-          AddInstruction(store);
-          if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
-        } else {
-          environment()->Bind(var, value);
-        }
+      if (hole_init) {
+        HValue* value = graph()->GetConstantHole();
+        HValue* context = environment()->LookupContext();
+        HStoreContextSlot* store = new HStoreContextSlot(
+            context, variable->index(), HStoreContextSlot::kNoCheck, value);
+        AddInstruction(store);
+        if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
       }
       break;
     case Variable::LOOKUP:
@@ -7252,48 +7214,74 @@
 }
 
 
-void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
+void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      globals_.Add(variable->name());
+      Handle<SharedFunctionInfo> function =
+          Compiler::BuildFunctionInfo(declaration->fun(), info()->script());
+      // Check for stack-overflow exception.
+      if (function.is_null()) return SetStackOverflow();
+      globals_.Add(function);
+      return;
+    }
+    case Variable::PARAMETER:
+    case Variable::LOCAL: {
+      CHECK_ALIVE(VisitForValue(declaration->fun()));
+      HValue* value = Pop();
+      environment()->Bind(variable, value);
+      break;
+    }
+    case Variable::CONTEXT: {
+      CHECK_ALIVE(VisitForValue(declaration->fun()));
+      HValue* value = Pop();
+      HValue* context = environment()->LookupContext();
+      HStoreContextSlot* store = new HStoreContextSlot(
+          context, variable->index(), HStoreContextSlot::kNoCheck, value);
+      AddInstruction(store);
+      if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
+      break;
+    }
+    case Variable::LOOKUP:
+      return Bailout("unsupported lookup slot in declaration");
+  }
+}
+
+
+void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* declaration) {
   UNREACHABLE();
 }
 
 
-void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* decl) {
+void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* declaration) {
   UNREACHABLE();
 }
 
 
-void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
-  UNREACHABLE();
-}
-
-
-void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* decl) {
-  UNREACHABLE();
-}
-
-
-void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* decl) {
+void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* declaration) {
   UNREACHABLE();
 }
 
 
 void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
-  // TODO(rossberg)
+  UNREACHABLE();
 }
 
 
 void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
-  // TODO(rossberg)
+  UNREACHABLE();
 }
 
 
 void HGraphBuilder::VisitModulePath(ModulePath* module) {
-  // TODO(rossberg)
+  UNREACHABLE();
 }
 
 
 void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
-  // TODO(rossberg)
+  UNREACHABLE();
 }
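
For context on the declaration rework above: each Visit*Declaration now pushes
a name followed by its initial value (the hole, undefined, or a compiled
SharedFunctionInfo) onto globals_, and VisitDeclarations emits one
HDeclareGlobals over the flattened pairs. A rough standalone model of that
pair layout (plain C++ with illustrative stand-in values, not V8 code):

#include <cassert>
#include <string>
#include <vector>

int main() {
  // Stand-in for globals_: names and initial values interleaved as pairs.
  std::vector<std::string> flat;
  flat.push_back("x");                            // var x;
  flat.push_back("<undefined>");
  flat.push_back("c");                            // const c;  (needs hole init)
  flat.push_back("<the hole>");
  flat.push_back("f");                            // function f() {}
  flat.push_back("<SharedFunctionInfo for f>");

  // The consumer walks the flat array two entries at a time, which is the
  // shape of the pairs FixedArray handed to the runtime.
  assert(flat.size() % 2 == 0);
  for (size_t i = 0; i < flat.size(); i += 2) {
    const std::string& name = flat[i];
    const std::string& initial_value = flat[i + 1];
    assert(!name.empty() && !initial_value.empty());
  }
  return 0;
}
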
 
 
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 5def407..b84ae70 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -913,11 +913,6 @@
   INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION)
 #undef INLINE_FUNCTION_GENERATOR_DECLARATION
 
-  void HandleDeclaration(VariableProxy* proxy,
-                         VariableMode mode,
-                         FunctionLiteral* function,
-                         int* global_count);
-
   void VisitDelete(UnaryOperation* expr);
   void VisitVoid(UnaryOperation* expr);
   void VisitTypeof(UnaryOperation* expr);
@@ -1167,6 +1162,7 @@
   HBasicBlock* current_block_;
 
   int inlined_count_;
+  ZoneList<Handle<Object> > globals_;
 
   Zone* zone_;
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 2287b63..471114c 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -6162,7 +6162,11 @@
   __ sub(ecx, edx);
   __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
   Label not_original_string;
-  __ j(not_equal, &not_original_string, Label::kNear);
+  // Shorter than original string's length: an actual substring.
+  __ j(below, &not_original_string, Label::kNear);
+  // Longer than original string's length or negative: unsafe arguments.
+  __ j(above, &runtime);
+  // Return original string.
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1);
   __ ret(3 * kPointerSize);
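
The new branches above make a three-way split on the requested length: equal
to the original length returns the original string, anything larger (including
a negative length, which compares as a huge unsigned value) goes to the
runtime, and only a strictly shorter request takes the real substring path.
A minimal standalone model of that classification (plain C++, illustrative
names, not the stub itself):

#include <cassert>
#include <cstdint>

enum class SubStringPath { kReturnOriginal, kRuntime, kActualSubstring };

SubStringPath Classify(int32_t result_length, uint32_t original_length) {
  // The unsigned comparison folds "negative" into the "too long" case.
  uint32_t unsigned_result = static_cast<uint32_t>(result_length);
  if (unsigned_result == original_length) return SubStringPath::kReturnOriginal;
  if (unsigned_result > original_length) return SubStringPath::kRuntime;
  return SubStringPath::kActualSubstring;
}

int main() {
  assert(Classify(5, 5) == SubStringPath::kReturnOriginal);
  assert(Classify(3, 5) == SubStringPath::kActualSubstring);
  assert(Classify(7, 5) == SubStringPath::kRuntime);   // longer than original
  assert(Classify(-1, 5) == SubStringPath::kRuntime);  // negative wraps high
  return 0;
}
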
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index ea61910..cff6454 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -397,9 +397,25 @@
   // Allocate new FixedDoubleArray.
   // edx: receiver
   // edi: length of source FixedArray (smi-tagged)
-  __ lea(esi, Operand(edi, times_4, FixedDoubleArray::kHeaderSize));
+  __ lea(esi, Operand(edi,
+                      times_4,
+                      FixedDoubleArray::kHeaderSize + kPointerSize));
   __ AllocateInNewSpace(esi, eax, ebx, no_reg, &gc_required, TAG_OBJECT);
 
+  Label aligned, aligned_done;
+  __ test(eax, Immediate(kDoubleAlignmentMask - kHeapObjectTag));
+  __ j(zero, &aligned, Label::kNear);
+  __ mov(FieldOperand(eax, 0),
+         Immediate(masm->isolate()->factory()->one_pointer_filler_map()));
+  __ add(eax, Immediate(kPointerSize));
+  __ jmp(&aligned_done);
+
+  __ bind(&aligned);
+  __ mov(Operand(eax, esi, times_1, -kPointerSize-1),
+         Immediate(masm->isolate()->factory()->one_pointer_filler_map()));
+
+  __ bind(&aligned_done);
+
   // eax: destination FixedDoubleArray
   // edi: number of elements
   // edx: receiver
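
The extra kPointerSize in the allocation above, together with the two filler
stores, keeps the FixedDoubleArray payload 8-byte aligned: if the fresh
allocation is misaligned a one-word filler is written first and the array
starts one word later, otherwise the spare word at the end receives the
filler. A standalone sketch of that decision (plain C++; the constants are
the usual ia32 values and are assumptions here, not taken from this patch):

#include <cassert>
#include <cstdint>

constexpr uint32_t kPointerSize = 4;     // assumed ia32 word size
constexpr uint32_t kDoubleAlignment = 8;
constexpr uint32_t kHeapObjectTag = 1;

// Returns the tagged address the array ends up at, given a tagged allocation.
uint32_t PlaceDoubleArray(uint32_t tagged_allocation) {
  uint32_t untagged = tagged_allocation - kHeapObjectTag;
  if (untagged % kDoubleAlignment != 0) {
    // Misaligned: a one-word filler goes first, the array starts one word in.
    return tagged_allocation + kPointerSize;
  }
  // Already aligned: the filler occupies the spare word at the end instead.
  return tagged_allocation;
}

int main() {
  // New-space allocations are word aligned, so the raw address is 0 or 4 mod 8.
  assert((PlaceDoubleArray(0x1000 + kHeapObjectTag) - kHeapObjectTag) % 8 == 0);
  assert((PlaceDoubleArray(0x1004 + kHeapObjectTag) - kHeapObjectTag) % 8 == 0);
  return 0;
}
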
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 376671d..0fc5001 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -262,11 +262,11 @@
       // For named function expressions, declare the function name as a
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
-        VariableProxy* proxy = scope()->function();
-        ASSERT(proxy->var()->mode() == CONST ||
-               proxy->var()->mode() == CONST_HARMONY);
-        ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
-        EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+        VariableDeclaration* function = scope()->function();
+        ASSERT(function->proxy()->var()->mode() == CONST ||
+               function->proxy()->var()->mode() == CONST_HARMONY);
+        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+        VisitVariableDeclaration(function);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -756,60 +756,51 @@
 }
 
 
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
-                                        VariableMode mode,
-                                        FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+  // The variable in the declaration always resides in the current function
+  // context.
+  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+  if (FLAG_debug_code) {
+    // Check that we're not inside a with or catch context.
+    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
+    __ cmp(ebx, isolate()->factory()->with_context_map());
+    __ Check(not_equal, "Declaration in with context.");
+    __ cmp(ebx, isolate()->factory()->catch_context_map());
+    __ Check(not_equal, "Declaration in catch context.");
+  }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+    VariableDeclaration* declaration) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
+  VariableProxy* proxy = declaration->proxy();
+  VariableMode mode = declaration->mode();
   Variable* variable = proxy->var();
-  bool binding_needs_init = (function == NULL) &&
-      (mode == CONST || mode == CONST_HARMONY || mode == LET);
+  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++global_count_;
+      globals_->Add(variable->name());
+      globals_->Add(variable->binding_needs_init()
+                        ? isolate()->factory()->the_hole_value()
+                        : isolate()->factory()->undefined_value());
       break;
 
     case Variable::PARAMETER:
     case Variable::LOCAL:
-      if (function != NULL) {
-        Comment cmnt(masm_, "[ Declaration");
-        VisitForAccumulatorValue(function);
-        __ mov(StackOperand(variable), result_register());
-      } else if (binding_needs_init) {
-        Comment cmnt(masm_, "[ Declaration");
+      if (hole_init) {
+        Comment cmnt(masm_, "[ VariableDeclaration");
         __ mov(StackOperand(variable),
                Immediate(isolate()->factory()->the_hole_value()));
       }
       break;
 
     case Variable::CONTEXT:
-      // The variable in the decl always resides in the current function
-      // context.
-      ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-      if (FLAG_debug_code) {
-        // Check that we're not inside a with or catch context.
-        __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
-        __ cmp(ebx, isolate()->factory()->with_context_map());
-        __ Check(not_equal, "Declaration in with context.");
-        __ cmp(ebx, isolate()->factory()->catch_context_map());
-        __ Check(not_equal, "Declaration in catch context.");
-      }
-      if (function != NULL) {
-        Comment cmnt(masm_, "[ Declaration");
-        VisitForAccumulatorValue(function);
-        __ mov(ContextOperand(esi, variable->index()), result_register());
-        // We know that we have written a function, which is not a smi.
-        __ RecordWriteContextSlot(esi,
-                                  Context::SlotOffset(variable->index()),
-                                  result_register(),
-                                  ecx,
-                                  kDontSaveFPRegs,
-                                  EMIT_REMEMBERED_SET,
-                                  OMIT_SMI_CHECK);
-        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (binding_needs_init) {
-        Comment cmnt(masm_, "[ Declaration");
+      if (hole_init) {
+        Comment cmnt(masm_, "[ VariableDeclaration");
+        EmitDebugCheckDeclarationContext(variable);
         __ mov(ContextOperand(esi, variable->index()),
                Immediate(isolate()->factory()->the_hole_value()));
         // No write barrier since the hole value is in old space.
@@ -818,14 +809,12 @@
       break;
 
     case Variable::LOOKUP: {
-      Comment cmnt(masm_, "[ Declaration");
+      Comment cmnt(masm_, "[ VariableDeclaration");
       __ push(esi);
       __ push(Immediate(variable->name()));
-      // Declaration nodes are always introduced in one of four modes.
-      ASSERT(mode == VAR ||
-             mode == CONST ||
-             mode == CONST_HARMONY ||
-             mode == LET);
+      // VariableDeclaration nodes are always introduced in one of four modes.
+      ASSERT(mode == VAR || mode == LET ||
+             mode == CONST || mode == CONST_HARMONY);
       PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
           ? READ_ONLY : NONE;
       __ push(Immediate(Smi::FromInt(attr)));
@@ -833,9 +822,7 @@
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
-      if (function != NULL) {
-        VisitForStackValue(function);
-      } else if (binding_needs_init) {
+      if (hole_init) {
         __ push(Immediate(isolate()->factory()->the_hole_value()));
       } else {
         __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
@@ -847,6 +834,118 @@
 }
 
 
+void FullCodeGenerator::VisitFunctionDeclaration(
+    FunctionDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      globals_->Add(variable->name());
+      Handle<SharedFunctionInfo> function =
+          Compiler::BuildFunctionInfo(declaration->fun(), script());
+      // Check for stack-overflow exception.
+      if (function.is_null()) return SetStackOverflow();
+      globals_->Add(function);
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      VisitForAccumulatorValue(declaration->fun());
+      __ mov(StackOperand(variable), result_register());
+      break;
+    }
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      VisitForAccumulatorValue(declaration->fun());
+      __ mov(ContextOperand(esi, variable->index()), result_register());
+      // We know that we have written a function, which is not a smi.
+      __ RecordWriteContextSlot(esi,
+                                Context::SlotOffset(variable->index()),
+                                result_register(),
+                                ecx,
+                                kDontSaveFPRegs,
+                                EMIT_REMEMBERED_SET,
+                                OMIT_SMI_CHECK);
+      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+      break;
+    }
+
+    case Variable::LOOKUP: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      __ push(esi);
+      __ push(Immediate(variable->name()));
+      __ push(Immediate(Smi::FromInt(NONE)));
+      VisitForStackValue(declaration->fun());
+      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+      break;
+    }
+  }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  Handle<JSModule> instance = declaration->module()->interface()->Instance();
+  ASSERT(!instance.is_null());
+
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      Comment cmnt(masm_, "[ ModuleDeclaration");
+      globals_->Add(variable->name());
+      globals_->Add(instance);
+      Visit(declaration->module());
+      break;
+    }
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ ModuleDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      __ mov(ContextOperand(esi, variable->index()), Immediate(instance));
+      Visit(declaration->module());
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL:
+    case Variable::LOOKUP:
+      UNREACHABLE();
+  }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED:
+      // TODO(rossberg)
+      break;
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ ImportDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      // TODO(rossberg)
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL:
+    case Variable::LOOKUP:
+      UNREACHABLE();
+  }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+  // TODO(rossberg)
+}
+
+
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   __ push(esi);  // The context is the first argument.
@@ -4430,7 +4529,8 @@
 
 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   Scope* declaration_scope = scope()->DeclarationScope();
-  if (declaration_scope->is_global_scope()) {
+  if (declaration_scope->is_global_scope() ||
+      declaration_scope->is_module_scope()) {
     // Contexts nested in the global context have a canonical empty function
     // as their closure, not the anonymous closure containing the global
     // code.  Pass a smi sentinel and let the runtime look up the empty
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 6dadad6..67c23ef 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -4514,9 +4514,10 @@
         __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high));
       }
     } else if (elements->IsFixedArray()) {
+      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
       for (int i = 0; i < elements_length; i++) {
         int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
-        Handle<Object> value = JSObject::GetElement(object, i);
+        Handle<Object> value(fast_elements->get(i));
         if (value->IsJSObject()) {
           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
           __ lea(ecx, Operand(result, *offset));
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index d783415..f8fb721 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -4030,6 +4030,7 @@
 
     int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
     __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
+
     // Restore the key, which is known to be the array length.
     __ mov(ecx, Immediate(0));
 
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index 7bbd521..2413b67 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -830,6 +830,19 @@
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
         VisitGlobalContext(ctx, &marking_visitor);
+      } else if (map->instance_type() == JS_FUNCTION_TYPE) {
+        marking_visitor.VisitPointers(
+            HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
+            HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
+
+        marking_visitor.VisitCodeEntry(
+            obj->address() + JSFunction::kCodeEntryOffset);
+
+        marking_visitor.VisitPointers(
+            HeapObject::RawField(obj,
+                                 JSFunction::kCodeEntryOffset + kPointerSize),
+            HeapObject::RawField(obj,
+                                 JSFunction::kNonWeakFieldsEndOffset));
       } else {
         obj->IterateBody(map->instance_type(), size, &marking_visitor);
       }
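
The incremental-marking change above visits a JSFunction's tagged fields in
two ranges around the code entry, which is itself visited through
VisitCodeEntry rather than as an ordinary pointer. A rough standalone model
of that split (plain C++; the offsets in main are made-up stand-ins for
JSFunction's real layout):

#include <cassert>
#include <vector>

struct Visits {
  std::vector<int> pointer_fields;  // offsets visited as ordinary tagged fields
  std::vector<int> code_entries;    // offsets visited via the code-entry hook
};

void VisitFunctionFields(int first_offset, int code_entry_offset,
                         int end_offset, int field_size, Visits* out) {
  for (int o = first_offset; o < code_entry_offset; o += field_size)
    out->pointer_fields.push_back(o);              // fields before the entry
  out->code_entries.push_back(code_entry_offset);  // special-cased code entry
  for (int o = code_entry_offset + field_size; o < end_offset; o += field_size)
    out->pointer_fields.push_back(o);              // fields after the entry
}

int main() {
  Visits v;
  // Made-up offsets; only the before/at/after split matters here.
  VisitFunctionFields(8, 16, 28, 4, &v);
  assert(v.code_entries.size() == 1 && v.code_entries[0] == 16);
  assert(v.pointer_fields.size() == 4);  // offsets 8, 12 and 20, 24
  return 0;
}
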
diff --git a/src/interface.cc b/src/interface.cc
index e344b86..7836110 100644
--- a/src/interface.cc
+++ b/src/interface.cc
@@ -79,7 +79,7 @@
     PrintF("%*sthis = ", Nesting::current(), "");
     this->Print(Nesting::current());
     PrintF("%*s%s : ", Nesting::current(), "",
-           (*reinterpret_cast<String**>(name))->ToAsciiArray());
+           (*static_cast<String**>(name))->ToAsciiArray());
     interface->Print(Nesting::current());
   }
 #endif
@@ -97,7 +97,7 @@
 #ifdef DEBUG
     Nesting nested;
 #endif
-    reinterpret_cast<Interface*>(p->value)->Unify(interface, ok);
+    static_cast<Interface*>(p->value)->Unify(interface, ok);
   }
 
 #ifdef DEBUG
@@ -180,6 +180,15 @@
     return;
   }
 
+  // Merge instance.
+  if (!that->instance_.is_null()) {
+    if (!this->instance_.is_null() && *this->instance_ != *that->instance_) {
+      *ok = false;
+      return;
+    }
+    this->instance_ = that->instance_;
+  }
+
   // Merge interfaces.
   this->flags_ |= that->flags_;
   that->forward_ = this;
diff --git a/src/interface.h b/src/interface.h
index c2991cb..580f082 100644
--- a/src/interface.h
+++ b/src/interface.h
@@ -86,6 +86,12 @@
     if (*ok) Chase()->flags_ |= MODULE;
   }
 
+  // Set associated instance object.
+  void MakeSingleton(Handle<JSModule> instance, bool* ok) {
+    *ok = IsModule() && Chase()->instance_.is_null();
+    if (*ok) Chase()->instance_ = instance;
+  }
+
   // Do not allow any further refinements, directly or through unification.
   void Freeze(bool* ok) {
     *ok = IsValue() || IsModule();
@@ -95,9 +101,6 @@
   // ---------------------------------------------------------------------------
   // Accessors.
 
-  // Look up an exported name. Returns NULL if not (yet) defined.
-  Interface* Lookup(Handle<String> name);
-
   // Check whether this is still a fully undetermined type.
   bool IsUnknown() { return Chase()->flags_ == NONE; }
 
@@ -110,6 +113,42 @@
   // Check whether this is closed (i.e. fully determined).
   bool IsFrozen() { return Chase()->flags_ & FROZEN; }
 
+  Handle<JSModule> Instance() { return Chase()->instance_; }
+
+  // Look up an exported name. Returns NULL if not (yet) defined.
+  Interface* Lookup(Handle<String> name);
+
+  // ---------------------------------------------------------------------------
+  // Iterators.
+
+  // Use like:
+  //   for (auto it = interface->iterator(); !it.done(); it.Advance()) {
+  //     ... it.name() ... it.interface() ...
+  //   }
+  class Iterator {
+   public:
+    bool done() const { return entry_ == NULL; }
+    Handle<String> name() const {
+      ASSERT(!done());
+      return Handle<String>(*static_cast<String**>(entry_->key));
+    }
+    Interface* interface() const {
+      ASSERT(!done());
+      return static_cast<Interface*>(entry_->value);
+    }
+    void Advance() { entry_ = exports_->Next(entry_); }
+
+   private:
+    friend class Interface;
+    explicit Iterator(const ZoneHashMap* exports)
+        : exports_(exports), entry_(exports ? exports->Start() : NULL) {}
+
+    const ZoneHashMap* exports_;
+    ZoneHashMap::Entry* entry_;
+  };
+
+  Iterator iterator() const { return Iterator(this->exports_); }
+
   // ---------------------------------------------------------------------------
   // Debugging.
 #ifdef DEBUG
@@ -129,6 +168,7 @@
   int flags_;
   Interface* forward_;     // Unification link
   ZoneHashMap* exports_;   // Module exports and their types (allocated lazily)
+  Handle<JSModule> instance_;
 
   explicit Interface(int flags)
     : flags_(flags),
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index b7d0d30..1e413d8 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -108,6 +108,30 @@
 }
 
 
+ContainedInLattice AddRange(ContainedInLattice containment,
+                            const int* ranges,
+                            int ranges_length,
+                            Interval new_range) {
+  ASSERT((ranges_length & 1) == 1);
+  ASSERT(ranges[ranges_length - 1] == String::kMaxUtf16CodeUnit + 1);
+  if (containment == kLatticeUnknown) return containment;
+  bool inside = false;
+  int last = 0;
+  for (int i = 0; i < ranges_length; inside = !inside, last = ranges[i], i++) {
+    // Consider the range from last to ranges[i].
+    // We haven't got to the new range yet.
+    if (ranges[i] <= new_range.from()) continue;
+    // New range is wholly inside the interval last..ranges[i].  Note that
+    // new_range.to() is inclusive, but the values in ranges are not.
+    if (last <= new_range.from() && new_range.to() < ranges[i]) {
+      return Combine(containment, inside ? kLatticeIn : kLatticeOut);
+    }
+    return kLatticeUnknown;
+  }
+  return containment;
+}
+
+
 // More makes code generation slower, less makes V8 benchmark score lower.
 const int kMaxLookaheadForBoyerMoore = 8;
 // In a 3-character pattern you can maximally step forwards 3 characters
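
AddRange above answers a containment question against a table of half-open
ranges terminated by a value past the last code unit: a query interval is
either wholly inside the character class, wholly outside it, or the result
stays unknown because it straddles a boundary. A simplified standalone model
of that query (plain C++; it drops the Combine step that merges the answer
with a previously accumulated lattice value):

#include <cassert>

enum Lattice { kIn, kOut, kUnknown };

// ranges = {from0, to0, from1, to1, ..., terminator}, "to" values exclusive.
Lattice Classify(const int* ranges, int length, int from, int to) {
  bool inside = false;
  int last = 0;
  for (int i = 0; i < length; inside = !inside, last = ranges[i], i++) {
    if (ranges[i] <= from) continue;         // Not reached the interval yet.
    if (last <= from && to < ranges[i]) {    // Wholly inside [last, ranges[i]).
      return inside ? kIn : kOut;
    }
    return kUnknown;                         // Straddles a range boundary.
  }
  return kUnknown;
}

int main() {
  // Word characters in the "inclusive from, exclusive to" encoding.
  const int word[] = { '0', '9' + 1, 'A', 'Z' + 1, '_', '_' + 1,
                       'a', 'z' + 1, 0x10000 };
  const int n = sizeof(word) / sizeof(word[0]);
  assert(Classify(word, n, 'a', 'f') == kIn);       // entirely word characters
  assert(Classify(word, n, ' ', ' ') == kOut);      // entirely non-word
  assert(Classify(word, n, '>', 'B') == kUnknown);  // crosses the 'A' boundary
  return 0;
}
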
@@ -2157,6 +2181,7 @@
   } else if (type_ != POSITIVE_SUBMATCH_SUCCESS) {
     on_success()->FillInBMInfo(offset, bm, not_at_start);
   }
+  SaveBMInfo(bm, not_at_start, offset);
 }
 
 
@@ -2181,6 +2206,7 @@
   // Match the behaviour of EatsAtLeast on this node.
   if (type() == AT_START && not_at_start) return;
   on_success()->FillInBMInfo(offset, bm, not_at_start);
+  SaveBMInfo(bm, not_at_start, offset);
 }
 
 
@@ -2617,12 +2643,14 @@
 
 
 void LoopChoiceNode::FillInBMInfo(
-    int offset, BoyerMooreLookahead* bm, bool nas) {
+    int offset, BoyerMooreLookahead* bm, bool not_at_start) {
   if (body_can_be_zero_length_) {
     bm->SetRest(offset);
+    SaveBMInfo(bm, not_at_start, offset);
     return;
   }
-  ChoiceNode::FillInBMInfo(offset, bm, nas);
+  ChoiceNode::FillInBMInfo(offset, bm, not_at_start);
+  SaveBMInfo(bm, not_at_start, offset);
 }
 
 
@@ -2710,110 +2738,83 @@
 }
 
 
-// Emit the code to handle \b and \B (word-boundary or non-word-boundary)
-// when we know whether the next character must be a word character or not.
-static void EmitHalfBoundaryCheck(AssertionNode::AssertionNodeType type,
-                                  RegExpCompiler* compiler,
-                                  RegExpNode* on_success,
-                                  Trace* trace) {
+// Emit the code to handle \b and \B (word-boundary or non-word-boundary).
+void AssertionNode::EmitBoundaryCheck(RegExpCompiler* compiler, Trace* trace) {
   RegExpMacroAssembler* assembler = compiler->macro_assembler();
-  Label done;
-
-  Trace new_trace(*trace);
-
-  bool expect_word_character = (type == AssertionNode::AFTER_WORD_CHARACTER);
-  Label* on_word = expect_word_character ? &done : new_trace.backtrack();
-  Label* on_non_word = expect_word_character ? new_trace.backtrack() : &done;
-
-  // Check whether previous character was a word character.
-  switch (trace->at_start()) {
-    case Trace::TRUE:
-      if (expect_word_character) {
-        assembler->GoTo(on_non_word);
-      }
-      break;
-    case Trace::UNKNOWN:
-      ASSERT_EQ(0, trace->cp_offset());
-      assembler->CheckAtStart(on_non_word);
-      // Fall through.
-    case Trace::FALSE:
-      int prev_char_offset = trace->cp_offset() - 1;
-      assembler->LoadCurrentCharacter(prev_char_offset, NULL, false, 1);
-      EmitWordCheck(assembler, on_word, on_non_word, expect_word_character);
-      // We may or may not have loaded the previous character.
-      new_trace.InvalidateCurrentCharacter();
+  Trace::TriBool next_is_word_character = Trace::UNKNOWN;
+  bool not_at_start = (trace->at_start() == Trace::FALSE);
+  BoyerMooreLookahead* lookahead = bm_info(not_at_start);
+  if (lookahead == NULL) {
+    int eats_at_least =
+        Min(kMaxLookaheadForBoyerMoore,
+            EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
+    if (eats_at_least >= 1) {
+      BoyerMooreLookahead* bm =
+          new BoyerMooreLookahead(eats_at_least, compiler);
+      FillInBMInfo(0, bm, not_at_start);
+      if (bm->at(0)->is_non_word()) next_is_word_character = Trace::FALSE;
+      if (bm->at(0)->is_word()) next_is_word_character = Trace::TRUE;
+    }
+  } else {
+    if (lookahead->at(0)->is_non_word()) next_is_word_character = Trace::FALSE;
+    if (lookahead->at(0)->is_word()) next_is_word_character = Trace::TRUE;
   }
+  bool at_boundary = (type_ == AssertionNode::AT_BOUNDARY);
+  if (next_is_word_character == Trace::UNKNOWN) {
+    Label before_non_word;
+    Label before_word;
+    if (trace->characters_preloaded() != 1) {
+      assembler->LoadCurrentCharacter(trace->cp_offset(), &before_non_word);
+    }
+    // Fall through on non-word.
+    EmitWordCheck(assembler, &before_word, &before_non_word, false);
+    // Next character is not a word character.
+    assembler->Bind(&before_non_word);
+    Label ok;
+    BacktrackIfPrevious(compiler, trace, at_boundary ? kIsNonWord : kIsWord);
+    assembler->GoTo(&ok);
 
-  assembler->Bind(&done);
-
-  on_success->Emit(compiler, &new_trace);
+    assembler->Bind(&before_word);
+    BacktrackIfPrevious(compiler, trace, at_boundary ? kIsWord : kIsNonWord);
+    assembler->Bind(&ok);
+  } else if (next_is_word_character == Trace::TRUE) {
+    BacktrackIfPrevious(compiler, trace, at_boundary ? kIsWord : kIsNonWord);
+  } else {
+    ASSERT(next_is_word_character == Trace::FALSE);
+    BacktrackIfPrevious(compiler, trace, at_boundary ? kIsNonWord : kIsWord);
+  }
 }
 
 
-// Emit the code to handle \b and \B (word-boundary or non-word-boundary).
-static void EmitBoundaryCheck(AssertionNode::AssertionNodeType type,
-                              RegExpCompiler* compiler,
-                              RegExpNode* on_success,
-                              Trace* trace) {
+void AssertionNode::BacktrackIfPrevious(
+    RegExpCompiler* compiler,
+    Trace* trace,
+    AssertionNode::IfPrevious backtrack_if_previous) {
   RegExpMacroAssembler* assembler = compiler->macro_assembler();
-  Label before_non_word;
-  Label before_word;
-  if (trace->characters_preloaded() != 1) {
-    assembler->LoadCurrentCharacter(trace->cp_offset(), &before_non_word);
-  }
-  // Fall through on non-word.
-  EmitWordCheck(assembler, &before_word, &before_non_word, false);
-
-  // We will be loading the previous character into the current character
-  // register.
   Trace new_trace(*trace);
   new_trace.InvalidateCurrentCharacter();
 
-  Label ok;
-  Label* boundary;
-  Label* not_boundary;
-  if (type == AssertionNode::AT_BOUNDARY) {
-    boundary = &ok;
-    not_boundary = new_trace.backtrack();
-  } else {
-    not_boundary = &ok;
-    boundary = new_trace.backtrack();
-  }
+  Label fall_through, dummy;
 
-  // Next character is not a word character.
-  assembler->Bind(&before_non_word);
+  Label* non_word = backtrack_if_previous == kIsNonWord ?
+                    new_trace.backtrack() :
+                    &fall_through;
+  Label* word = backtrack_if_previous == kIsNonWord ?
+                &fall_through :
+                new_trace.backtrack();
+
   if (new_trace.cp_offset() == 0) {
     // The start of input counts as a non-word character, so the question is
     // decided if we are at the start.
-    assembler->CheckAtStart(not_boundary);
+    assembler->CheckAtStart(non_word);
   }
   // We already checked that we are not at the start of input so it must be
   // OK to load the previous character.
-  assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1,
-                                  &ok,  // Unused dummy label in this call.
-                                  false);
-  // Fall through on non-word.
-  EmitWordCheck(assembler, boundary, not_boundary, false);
-  assembler->GoTo(not_boundary);
+  assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1, &dummy, false);
+  EmitWordCheck(assembler, word, non_word, backtrack_if_previous == kIsNonWord);
 
-  // Next character is a word character.
-  assembler->Bind(&before_word);
-  if (new_trace.cp_offset() == 0) {
-    // The start of input counts as a non-word character, so the question is
-    // decided if we are at the start.
-    assembler->CheckAtStart(boundary);
-  }
-  // We already checked that we are not at the start of input so it must be
-  // OK to load the previous character.
-  assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1,
-                                  &ok,  // Unused dummy label in this call.
-                                  false);
-  bool fall_through_on_word = (type == AssertionNode::AT_NON_BOUNDARY);
-  EmitWordCheck(assembler, not_boundary, boundary, fall_through_on_word);
-
-  assembler->Bind(&ok);
-
-  on_success->Emit(compiler, &new_trace);
+  assembler->Bind(&fall_through);
+  on_success()->Emit(compiler, &new_trace);
 }
 
 
@@ -2861,13 +2862,9 @@
       return;
     case AT_BOUNDARY:
     case AT_NON_BOUNDARY: {
-      EmitBoundaryCheck(type_, compiler, on_success(), trace);
+      EmitBoundaryCheck(compiler, trace);
       return;
     }
-    case AFTER_WORD_CHARACTER:
-    case AFTER_NONWORD_CHARACTER: {
-      EmitHalfBoundaryCheck(type_, compiler, on_success(), trace);
-    }
   }
   on_success()->Emit(compiler, trace);
 }
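
With the rewrite above, EmitBoundaryCheck first asks a Boyer-Moore lookahead
whether the character following the assertion must be a word character; when
that is known, only the previous character needs a runtime test, otherwise
both sides of the boundary are checked. A small standalone sketch of that
decision table (plain C++, descriptive strings standing in for emitted code):

#include <cassert>
#include <string>

enum NextChar { kKnownWord, kKnownNonWord, kUnknownChar };

std::string PlanBoundaryCheck(bool at_boundary, NextChar next) {
  switch (next) {
    case kKnownWord:
      // \b holds only if the previous character is NOT a word character.
      return at_boundary ? "backtrack if previous is word"
                         : "backtrack if previous is non-word";
    case kKnownNonWord:
      return at_boundary ? "backtrack if previous is non-word"
                         : "backtrack if previous is word";
    case kUnknownChar:
      return "check next character, then branch on previous";
  }
  return "";
}

int main() {
  assert(PlanBoundaryCheck(true, kKnownWord) == "backtrack if previous is word");
  assert(PlanBoundaryCheck(false, kKnownNonWord) ==
         "backtrack if previous is word");
  return 0;
}
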
@@ -3277,24 +3274,74 @@
 };
 
 
+// The '2' variant has inclusive from and exclusive to.
+static const int kSpaceRanges[] = { '\t', '\r' + 1, ' ', ' ' + 1, 0x00A0,
+    0x00A1, 0x1680, 0x1681, 0x180E, 0x180F, 0x2000, 0x200B, 0x2028, 0x202A,
+    0x202F, 0x2030, 0x205F, 0x2060, 0x3000, 0x3001, 0xFEFF, 0xFF00, 0x10000 };
+static const int kSpaceRangeCount = ARRAY_SIZE(kSpaceRanges);
+
+static const int kWordRanges[] = {
+    '0', '9' + 1, 'A', 'Z' + 1, '_', '_' + 1, 'a', 'z' + 1, 0x10000 };
+static const int kWordRangeCount = ARRAY_SIZE(kWordRanges);
+static const int kDigitRanges[] = { '0', '9' + 1, 0x10000 };
+static const int kDigitRangeCount = ARRAY_SIZE(kDigitRanges);
+static const int kSurrogateRanges[] = { 0xd800, 0xe000, 0x10000 };
+static const int kSurrogateRangeCount = ARRAY_SIZE(kSurrogateRanges);
+static const int kLineTerminatorRanges[] = { 0x000A, 0x000B, 0x000D, 0x000E,
+    0x2028, 0x202A, 0x10000 };
+static const int kLineTerminatorRangeCount = ARRAY_SIZE(kLineTerminatorRanges);
+
+
+void BoyerMoorePositionInfo::Set(int character) {
+  SetInterval(Interval(character, character));
+}
+
+
+void BoyerMoorePositionInfo::SetInterval(const Interval& interval) {
+  s_ = AddRange(s_, kSpaceRanges, kSpaceRangeCount, interval);
+  w_ = AddRange(w_, kWordRanges, kWordRangeCount, interval);
+  d_ = AddRange(d_, kDigitRanges, kDigitRangeCount, interval);
+  surrogate_ =
+      AddRange(surrogate_, kSurrogateRanges, kSurrogateRangeCount, interval);
+  if (interval.to() - interval.from() >= kMapSize - 1) {
+    if (map_count_ != kMapSize) {
+      map_count_ = kMapSize;
+      for (int i = 0; i < kMapSize; i++) map_->at(i) = true;
+    }
+    return;
+  }
+  for (int i = interval.from(); i <= interval.to(); i++) {
+    int mod_character = (i & kMask);
+    if (!map_->at(mod_character)) {
+      map_count_++;
+      map_->at(mod_character) = true;
+    }
+    if (map_count_ == kMapSize) return;
+  }
+}
+
+
+void BoyerMoorePositionInfo::SetAll() {
+  s_ = w_ = d_ = kLatticeUnknown;
+  if (map_count_ != kMapSize) {
+    map_count_ = kMapSize;
+    for (int i = 0; i < kMapSize; i++) map_->at(i) = true;
+  }
+}
+
+
 BoyerMooreLookahead::BoyerMooreLookahead(
-    int length, int map_length, RegExpCompiler* compiler)
+    int length, RegExpCompiler* compiler)
     : length_(length),
-      map_length_(map_length),
       compiler_(compiler) {
-  ASSERT(IsPowerOf2(map_length));
   if (compiler->ascii()) {
     max_char_ = String::kMaxAsciiCharCode;
   } else {
     max_char_ = String::kMaxUtf16CodeUnit;
   }
-  bitmaps_ = new ZoneList<ZoneList<bool>*>(length);
+  bitmaps_ = new ZoneList<BoyerMoorePositionInfo*>(length);
   for (int i = 0; i < length; i++) {
-    bitmaps_->Add(new ZoneList<bool>(map_length));
-    ZoneList<bool>* map = bitmaps_->at(i);
-    for (int i = 0; i < map_length; i++) {
-      map->Add(false);
-    }
+    bitmaps_->Add(new BoyerMoorePositionInfo());
   }
 }
 
@@ -3304,8 +3351,11 @@
 // different parameters at once this is a tradeoff.
 bool BoyerMooreLookahead::FindWorthwhileInterval(int* from, int* to) {
   int biggest_points = 0;
+  // If more than 32 characters out of 128 can occur, it is unlikely that we
+  // can be lucky enough to step forwards much of the time.
+  const int kMaxMax = 32;
   for (int max_number_of_chars = 4;
-       max_number_of_chars < kTooManyCharacters;
+       max_number_of_chars < kMaxMax;
        max_number_of_chars *= 2) {
     biggest_points =
         FindBestInterval(max_number_of_chars, biggest_points, from, to);
@@ -3332,7 +3382,7 @@
     bool union_map[kSize];
     for (int j = 0; j < kSize; j++) union_map[j] = false;
     while (i < length_ && Count(i) <= max_number_of_chars) {
-      ZoneList<bool>* map = bitmaps_->at(i);
+      BoyerMoorePositionInfo* map = bitmaps_->at(i);
       for (int j = 0; j < kSize; j++) union_map[j] |= map->at(j);
       i++;
     }
@@ -3387,8 +3437,8 @@
   int skip = max_lookahead + 1 - min_lookahead;
 
   for (int i = max_lookahead; i >= min_lookahead; i--) {
-    ZoneList<bool>* map = bitmaps_->at(i);
-    for (int j = 0; j < map_length_; j++) {
+    BoyerMoorePositionInfo* map = bitmaps_->at(i);
+    for (int j = 0; j < kSize; j++) {
       if (map->at(j)) {
         boolean_skip_table->set(j, kDontSkipArrayEntry);
       }
@@ -3401,29 +3451,29 @@
 
 // See comment above on the implementation of GetSkipTable.
 bool BoyerMooreLookahead::EmitSkipInstructions(RegExpMacroAssembler* masm) {
+  const int kSize = RegExpMacroAssembler::kTableSize;
+
   int min_lookahead = 0;
   int max_lookahead = 0;
 
   if (!FindWorthwhileInterval(&min_lookahead, &max_lookahead)) return false;
 
   bool found_single_character = false;
-  bool abandoned_search_for_single_character = false;
   int single_character = 0;
   for (int i = max_lookahead; i >= min_lookahead; i--) {
-    ZoneList<bool>* map = bitmaps_->at(i);
-    for (int j = 0; j < map_length_; j++) {
+    BoyerMoorePositionInfo* map = bitmaps_->at(i);
+    if (map->map_count() > 1 ||
+        (found_single_character && map->map_count() != 0)) {
+      found_single_character = false;
+      break;
+    }
+    for (int j = 0; j < kSize; j++) {
       if (map->at(j)) {
-        if (found_single_character) {
-          found_single_character = false;  // Found two.
-          abandoned_search_for_single_character = true;
-          break;
-        } else {
-          found_single_character = true;
-          single_character = j;
-        }
+        found_single_character = true;
+        single_character = j;
+        break;
       }
     }
-    if (abandoned_search_for_single_character) break;
   }
 
   int lookahead_width = max_lookahead + 1 - min_lookahead;
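
The Boyer-Moore-style skip loop emitted from this lookahead data advances the
search position by skip_distance for as long as the character it sees
max_lookahead places ahead cannot occur at that point in any match. A
standalone model of that loop (plain C++, illustrative data, not the emitted
code):

#include <cassert>
#include <string>
#include <vector>

int SkipAhead(const std::string& subject, int start, int max_lookahead,
              int skip_distance, const std::vector<bool>& can_occur) {
  int pos = start;
  while (pos + max_lookahead < static_cast<int>(subject.size()) &&
         !can_occur[static_cast<unsigned char>(subject[pos + max_lookahead])]) {
    pos += skip_distance;  // Character cannot appear there: skip forwards.
  }
  return pos;
}

int main() {
  // Pretend only 'a' can appear two characters into a match.
  std::vector<bool> table(256, false);
  table['a'] = true;
  int pos = SkipAhead("xxxxxxzzaxx", 0, 2, 3, table);
  // Positions 0 and 3 are skipped (the lookahead sees 'x'); at 6 it sees 'a'.
  assert(pos == 6);
  return 0;
}
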
@@ -3437,8 +3487,7 @@
     Label cont, again;
     masm->Bind(&again);
     masm->LoadCurrentCharacter(max_lookahead, &cont, true);
-    if (max_char_ > map_length_) {
-      ASSERT(map_length_ == RegExpMacroAssembler::kTableSize);
+    if (max_char_ > kSize) {
       masm->CheckCharacterAfterAnd(single_character,
                                    RegExpMacroAssembler::kTableMask,
                                    &cont);
@@ -3452,7 +3501,7 @@
   }
 
   Handle<ByteArray> boolean_skip_table =
-      FACTORY->NewByteArray(map_length_, TENURED);
+      FACTORY->NewByteArray(kSize, TENURED);
   int skip_distance = GetSkipTable(
       min_lookahead, max_lookahead, boolean_skip_table);
   ASSERT(skip_distance != 0);
@@ -3631,16 +3680,20 @@
         // not be atoms, they can be any reasonably limited character class or
         // small alternation.
         ASSERT(trace->is_trivial());  // This is the case on LoopChoiceNodes.
-        eats_at_least =
-            Min(kMaxLookaheadForBoyerMoore,
-                EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
-        if (eats_at_least >= 1) {
-          BoyerMooreLookahead bm(eats_at_least,
-                                 RegExpMacroAssembler::kTableSize,
-                                 compiler);
-          GuardedAlternative alt0 = alternatives_->at(0);
-          alt0.node()->FillInBMInfo(0, &bm, not_at_start);
-          skip_was_emitted = bm.EmitSkipInstructions(macro_assembler);
+        BoyerMooreLookahead* lookahead = bm_info(not_at_start);
+        if (lookahead == NULL) {
+          eats_at_least =
+              Min(kMaxLookaheadForBoyerMoore,
+                  EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
+          if (eats_at_least >= 1) {
+            BoyerMooreLookahead* bm =
+                new BoyerMooreLookahead(eats_at_least, compiler);
+            GuardedAlternative alt0 = alternatives_->at(0);
+            alt0.node()->FillInBMInfo(0, bm, not_at_start);
+            skip_was_emitted = bm->EmitSkipInstructions(macro_assembler);
+          }
+        } else {
+          skip_was_emitted = lookahead->EmitSkipInstructions(macro_assembler);
         }
       }
     }
@@ -4203,12 +4256,6 @@
     case AssertionNode::AFTER_NEWLINE:
       stream()->Add("label=\"(?<=\\n)\", shape=septagon");
       break;
-    case AssertionNode::AFTER_WORD_CHARACTER:
-      stream()->Add("label=\"(?<=\\w)\", shape=septagon");
-      break;
-    case AssertionNode::AFTER_NONWORD_CHARACTER:
-      stream()->Add("label=\"(?<=\\W)\", shape=septagon");
-      break;
   }
   stream()->Add("];\n");
   PrintAttributes(that);
@@ -4313,21 +4360,6 @@
 // -------------------------------------------------------------------
 // Tree to graph conversion
 
-static const uc16 kSpaceRanges[] = { 0x0009, 0x000D, 0x0020, 0x0020, 0x00A0,
-    0x00A0, 0x1680, 0x1680, 0x180E, 0x180E, 0x2000, 0x200A, 0x2028, 0x2029,
-    0x202F, 0x202F, 0x205F, 0x205F, 0x3000, 0x3000, 0xFEFF, 0xFEFF };
-static const int kSpaceRangeCount = ARRAY_SIZE(kSpaceRanges);
-
-static const uc16 kWordRanges[] = { '0', '9', 'A', 'Z', '_', '_', 'a', 'z' };
-static const int kWordRangeCount = ARRAY_SIZE(kWordRanges);
-
-static const uc16 kDigitRanges[] = { '0', '9' };
-static const int kDigitRangeCount = ARRAY_SIZE(kDigitRanges);
-
-static const uc16 kLineTerminatorRanges[] = { 0x000A, 0x000A, 0x000D, 0x000D,
-    0x2028, 0x2029 };
-static const int kLineTerminatorRangeCount = ARRAY_SIZE(kLineTerminatorRanges);
-
 RegExpNode* RegExpAtom::ToNode(RegExpCompiler* compiler,
                                RegExpNode* on_success) {
   ZoneList<TextElement>* elms = new ZoneList<TextElement>(1);
@@ -4341,9 +4373,12 @@
   return new TextNode(elements(), on_success);
 }
 
+
 static bool CompareInverseRanges(ZoneList<CharacterRange>* ranges,
-                                 const uc16* special_class,
+                                 const int* special_class,
                                  int length) {
+  length--;  // Remove final 0x10000.
+  ASSERT(special_class[length] == 0x10000);
   ASSERT(ranges->length() != 0);
   ASSERT(length != 0);
   ASSERT(special_class[0] != 0);
@@ -4359,7 +4394,7 @@
       return false;
     }
     range = ranges->at((i >> 1) + 1);
-    if (special_class[i+1] != range.from() - 1) {
+    if (special_class[i+1] != range.from()) {
       return false;
     }
   }
@@ -4371,14 +4406,17 @@
 
 
 static bool CompareRanges(ZoneList<CharacterRange>* ranges,
-                          const uc16* special_class,
+                          const int* special_class,
                           int length) {
+  length--;  // Remove final 0x10000.
+  ASSERT(special_class[length] == 0x10000);
   if (ranges->length() * 2 != length) {
     return false;
   }
   for (int i = 0; i < length; i += 2) {
     CharacterRange range = ranges->at(i >> 1);
-    if (range.from() != special_class[i] || range.to() != special_class[i+1]) {
+    if (range.from() != special_class[i] ||
+        range.to() != special_class[i + 1] - 1) {
       return false;
     }
   }
@@ -4779,27 +4817,31 @@
 }
 
 
-static void AddClass(const uc16* elmv,
+static void AddClass(const int* elmv,
                      int elmc,
                      ZoneList<CharacterRange>* ranges) {
+  elmc--;
+  ASSERT(elmv[elmc] == 0x10000);
   for (int i = 0; i < elmc; i += 2) {
-    ASSERT(elmv[i] <= elmv[i + 1]);
-    ranges->Add(CharacterRange(elmv[i], elmv[i + 1]));
+    ASSERT(elmv[i] < elmv[i + 1]);
+    ranges->Add(CharacterRange(elmv[i], elmv[i + 1] - 1));
   }
 }
 
 
-static void AddClassNegated(const uc16 *elmv,
+static void AddClassNegated(const int *elmv,
                             int elmc,
                             ZoneList<CharacterRange>* ranges) {
+  elmc--;
+  ASSERT(elmv[elmc] == 0x10000);
   ASSERT(elmv[0] != 0x0000);
   ASSERT(elmv[elmc-1] != String::kMaxUtf16CodeUnit);
   uc16 last = 0x0000;
   for (int i = 0; i < elmc; i += 2) {
     ASSERT(last <= elmv[i] - 1);
-    ASSERT(elmv[i] <= elmv[i + 1]);
+    ASSERT(elmv[i] < elmv[i + 1]);
     ranges->Add(CharacterRange(last, elmv[i] - 1));
-    last = elmv[i + 1] + 1;
+    last = elmv[i + 1];
   }
   ranges->Add(CharacterRange(last, String::kMaxUtf16CodeUnit));
 }
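
The AddClass/AddClassNegated changes above consume the new table encoding
(inclusive from, exclusive to, terminated by 0x10000) while still producing
inclusive character ranges. A small standalone sketch of both conversions
(plain C++; kMaxCodeUnit stands in for String::kMaxUtf16CodeUnit):

#include <cassert>
#include <utility>
#include <vector>

const int kMaxCodeUnit = 0xFFFF;
typedef std::pair<int, int> Range;  // inclusive [from, to]

std::vector<Range> MakeClass(const int* elmv, int elmc) {
  elmc--;  // Drop the 0x10000 terminator.
  std::vector<Range> out;
  for (int i = 0; i < elmc; i += 2)
    out.push_back(Range(elmv[i], elmv[i + 1] - 1));
  return out;
}

std::vector<Range> MakeClassNegated(const int* elmv, int elmc) {
  elmc--;  // Drop the 0x10000 terminator.
  std::vector<Range> out;
  int last = 0x0000;
  for (int i = 0; i < elmc; i += 2) {
    out.push_back(Range(last, elmv[i] - 1));  // Gap before this class range.
    last = elmv[i + 1];
  }
  out.push_back(Range(last, kMaxCodeUnit));   // Tail after the last range.
  return out;
}

int main() {
  const int digits[] = { '0', '9' + 1, 0x10000 };  // same shape as kDigitRanges
  std::vector<Range> d = MakeClass(digits, 3);
  assert(d.size() == 1 && d[0].first == '0' && d[0].second == '9');
  std::vector<Range> nd = MakeClassNegated(digits, 3);
  assert(nd.size() == 2 && nd[0].second == '0' - 1 && nd[1].first == '9' + 1);
  return 0;
}
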
@@ -4850,8 +4892,8 @@
 }
 
 
-Vector<const uc16> CharacterRange::GetWordBounds() {
-  return Vector<const uc16>(kWordRanges, kWordRangeCount);
+Vector<const int> CharacterRange::GetWordBounds() {
+  return Vector<const int>(kWordRanges, kWordRangeCount - 1);
 }
 
 
@@ -4883,7 +4925,7 @@
 
 
 void CharacterRange::Split(ZoneList<CharacterRange>* base,
-                           Vector<const uc16> overlay,
+                           Vector<const int> overlay,
                            ZoneList<CharacterRange>** included,
                            ZoneList<CharacterRange>** excluded) {
   ASSERT_EQ(NULL, *included);
@@ -4892,7 +4934,7 @@
   for (int i = 0; i < base->length(); i++)
     table.AddRange(base->at(i), CharacterRangeSplitter::kInBase);
   for (int i = 0; i < overlay.length(); i += 2) {
-    table.AddRange(CharacterRange(overlay[i], overlay[i+1]),
+    table.AddRange(CharacterRange(overlay[i], overlay[i + 1] - 1),
                    CharacterRangeSplitter::kInOverlay);
   }
   CharacterRangeSplitter callback(included, excluded);
@@ -4978,87 +5020,6 @@
   return true;
 }
 
-SetRelation CharacterRange::WordCharacterRelation(
-    ZoneList<CharacterRange>* range) {
-  ASSERT(IsCanonical(range));
-  int i = 0;  // Word character range index.
-  int j = 0;  // Argument range index.
-  ASSERT_NE(0, kWordRangeCount);
-  SetRelation result;
-  if (range->length() == 0) {
-    result.SetElementsInSecondSet();
-    return result;
-  }
-  CharacterRange argument_range = range->at(0);
-  CharacterRange word_range = CharacterRange(kWordRanges[0], kWordRanges[1]);
-  while (i < kWordRangeCount && j < range->length()) {
-    // Check the two ranges for the five cases:
-    // - no overlap.
-    // - partial overlap (there are elements in both ranges that isn't
-    //   in the other, and there are also elements that are in both).
-    // - argument range entirely inside word range.
-    // - word range entirely inside argument range.
-    // - ranges are completely equal.
-
-    // First check for no overlap. The earlier range is not in the other set.
-    if (argument_range.from() > word_range.to()) {
-      // Ranges are disjoint. The earlier word range contains elements that
-      // cannot be in the argument set.
-      result.SetElementsInSecondSet();
-    } else if (word_range.from() > argument_range.to()) {
-      // Ranges are disjoint. The earlier argument range contains elements that
-      // cannot be in the word set.
-      result.SetElementsInFirstSet();
-    } else if (word_range.from() <= argument_range.from() &&
-               word_range.to() >= argument_range.from()) {
-      result.SetElementsInBothSets();
-      // argument range completely inside word range.
-      if (word_range.from() < argument_range.from() ||
-          word_range.to() > argument_range.from()) {
-        result.SetElementsInSecondSet();
-      }
-    } else if (word_range.from() >= argument_range.from() &&
-               word_range.to() <= argument_range.from()) {
-      result.SetElementsInBothSets();
-      result.SetElementsInFirstSet();
-    } else {
-      // There is overlap, and neither is a subrange of the other
-      result.SetElementsInFirstSet();
-      result.SetElementsInSecondSet();
-      result.SetElementsInBothSets();
-    }
-    if (result.NonTrivialIntersection()) {
-      // The result is as (im)precise as we can possibly make it.
-      return result;
-    }
-    // Progress the range(s) with minimal to-character.
-    uc16 word_to = word_range.to();
-    uc16 argument_to = argument_range.to();
-    if (argument_to <= word_to) {
-      j++;
-      if (j < range->length()) {
-        argument_range = range->at(j);
-      }
-    }
-    if (word_to <= argument_to) {
-      i += 2;
-      if (i < kWordRangeCount) {
-        word_range = CharacterRange(kWordRanges[i], kWordRanges[i + 1]);
-      }
-    }
-  }
-  // Check if anything wasn't compared in the loop.
-  if (i < kWordRangeCount) {
-    // word range contains something not in argument range.
-    result.SetElementsInSecondSet();
-  } else if (j < range->length()) {
-    // Argument range contains something not in word range.
-    result.SetElementsInFirstSet();
-  }
-
-  return result;
-}
-
 
 ZoneList<CharacterRange>* CharacterSet::ranges() {
   if (ranges_ == NULL) {
@@ -5191,145 +5152,6 @@
 }
 
 
-// Utility function for CharacterRange::Merge. Adds a range at the end of
-// a canonicalized range list, if necessary merging the range with the last
-// range of the list.
-static void AddRangeToSet(ZoneList<CharacterRange>* set, CharacterRange range) {
-  if (set == NULL) return;
-  ASSERT(set->length() == 0 || set->at(set->length() - 1).to() < range.from());
-  int n = set->length();
-  if (n > 0) {
-    CharacterRange lastRange = set->at(n - 1);
-    if (lastRange.to() == range.from() - 1) {
-      set->at(n - 1) = CharacterRange(lastRange.from(), range.to());
-      return;
-    }
-  }
-  set->Add(range);
-}
-
-
-static void AddRangeToSelectedSet(int selector,
-                                  ZoneList<CharacterRange>* first_set,
-                                  ZoneList<CharacterRange>* second_set,
-                                  ZoneList<CharacterRange>* intersection_set,
-                                  CharacterRange range) {
-  switch (selector) {
-    case kInsideFirst:
-      AddRangeToSet(first_set, range);
-      break;
-    case kInsideSecond:
-      AddRangeToSet(second_set, range);
-      break;
-    case kInsideBoth:
-      AddRangeToSet(intersection_set, range);
-      break;
-  }
-}
-
-
-
-void CharacterRange::Merge(ZoneList<CharacterRange>* first_set,
-                           ZoneList<CharacterRange>* second_set,
-                           ZoneList<CharacterRange>* first_set_only_out,
-                           ZoneList<CharacterRange>* second_set_only_out,
-                           ZoneList<CharacterRange>* both_sets_out) {
-  // Inputs are canonicalized.
-  ASSERT(CharacterRange::IsCanonical(first_set));
-  ASSERT(CharacterRange::IsCanonical(second_set));
-  // Outputs are empty, if applicable.
-  ASSERT(first_set_only_out == NULL || first_set_only_out->length() == 0);
-  ASSERT(second_set_only_out == NULL || second_set_only_out->length() == 0);
-  ASSERT(both_sets_out == NULL || both_sets_out->length() == 0);
-
-  // Merge sets by iterating through the lists in order of lowest "from" value,
-  // and putting intervals into one of three sets.
-
-  if (first_set->length() == 0) {
-    second_set_only_out->AddAll(*second_set);
-    return;
-  }
-  if (second_set->length() == 0) {
-    first_set_only_out->AddAll(*first_set);
-    return;
-  }
-  // Indices into input lists.
-  int i1 = 0;
-  int i2 = 0;
-  // Cache length of input lists.
-  int n1 = first_set->length();
-  int n2 = second_set->length();
-  // Current range. May be invalid if state is kInsideNone.
-  int from = 0;
-  int to = -1;
-  // Where current range comes from.
-  int state = kInsideNone;
-
-  while (i1 < n1 || i2 < n2) {
-    CharacterRange next_range;
-    int range_source;
-    if (i2 == n2 ||
-        (i1 < n1 && first_set->at(i1).from() < second_set->at(i2).from())) {
-      // Next smallest element is in first set.
-      next_range = first_set->at(i1++);
-      range_source = kInsideFirst;
-    } else {
-      // Next smallest element is in second set.
-      next_range = second_set->at(i2++);
-      range_source = kInsideSecond;
-    }
-    if (to < next_range.from()) {
-      // Ranges disjoint: |current|  |next|
-      AddRangeToSelectedSet(state,
-                            first_set_only_out,
-                            second_set_only_out,
-                            both_sets_out,
-                            CharacterRange(from, to));
-      from = next_range.from();
-      to = next_range.to();
-      state = range_source;
-    } else {
-      if (from < next_range.from()) {
-        AddRangeToSelectedSet(state,
-                              first_set_only_out,
-                              second_set_only_out,
-                              both_sets_out,
-                              CharacterRange(from, next_range.from()-1));
-      }
-      if (to < next_range.to()) {
-        // Ranges overlap:  |current|
-        //                       |next|
-        AddRangeToSelectedSet(state | range_source,
-                              first_set_only_out,
-                              second_set_only_out,
-                              both_sets_out,
-                              CharacterRange(next_range.from(), to));
-        from = to + 1;
-        to = next_range.to();
-        state = range_source;
-      } else {
-        // Range included:    |current| , possibly ending at same character.
-        //                      |next|
-        AddRangeToSelectedSet(
-            state | range_source,
-            first_set_only_out,
-            second_set_only_out,
-            both_sets_out,
-            CharacterRange(next_range.from(), next_range.to()));
-        from = next_range.to() + 1;
-        // If ranges end at same character, both ranges are consumed completely.
-        if (next_range.to() == to) state = kInsideNone;
-      }
-    }
-  }
-  AddRangeToSelectedSet(state,
-                        first_set_only_out,
-                        second_set_only_out,
-                        both_sets_out,
-                        CharacterRange(from, to));
-}
-
-
 void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
                             ZoneList<CharacterRange>* negated_ranges) {
   ASSERT(CharacterRange::IsCanonical(ranges));
@@ -5642,169 +5464,20 @@
 
 void Analysis::VisitAssertion(AssertionNode* that) {
   EnsureAnalyzed(that->on_success());
-  AssertionNode::AssertionNodeType type = that->type();
-  if (type == AssertionNode::AT_BOUNDARY ||
-      type == AssertionNode::AT_NON_BOUNDARY) {
-    // Check if the following character is known to be a word character
-    // or known to not be a word character.
-    ZoneList<CharacterRange>* following_chars = that->FirstCharacterSet();
-
-    CharacterRange::Canonicalize(following_chars);
-
-    SetRelation word_relation =
-        CharacterRange::WordCharacterRelation(following_chars);
-    if (word_relation.Disjoint()) {
-      // Includes the case where following_chars is empty (e.g., end-of-input).
-      // Following character is definitely *not* a word character.
-      type = (type == AssertionNode::AT_BOUNDARY) ?
-                 AssertionNode::AFTER_WORD_CHARACTER :
-                 AssertionNode::AFTER_NONWORD_CHARACTER;
-      that->set_type(type);
-    } else if (word_relation.ContainedIn()) {
-      // Following character is definitely a word character.
-      type = (type == AssertionNode::AT_BOUNDARY) ?
-                 AssertionNode::AFTER_NONWORD_CHARACTER :
-                 AssertionNode::AFTER_WORD_CHARACTER;
-      that->set_type(type);
-    }
-  }
 }
 
 
-ZoneList<CharacterRange>* RegExpNode::FirstCharacterSet() {
-  if (first_character_set_ == NULL) {
-    if (ComputeFirstCharacterSet(kFirstCharBudget) < 0) {
-      // If we can't find an exact solution within the budget, we
-      // set the value to the set of every character, i.e., all characters
-      // are possible.
-      ZoneList<CharacterRange>* all_set = new ZoneList<CharacterRange>(1);
-      all_set->Add(CharacterRange::Everything());
-      first_character_set_ = all_set;
-    }
-  }
-  return first_character_set_;
+void BackReferenceNode::FillInBMInfo(
+    int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+  // Working out the set of characters that a backreference can match is too
+  // hard, so we just say that any character can match.
+  bm->SetRest(offset);
+  SaveBMInfo(bm, not_at_start, offset);
 }
 
 
-int RegExpNode::ComputeFirstCharacterSet(int budget) {
-  // Default behavior is to not be able to determine the first character.
-  return kComputeFirstCharacterSetFail;
-}
-
-
-int LoopChoiceNode::ComputeFirstCharacterSet(int budget) {
-  budget--;
-  if (budget >= 0) {
-    // Find loop min-iteration. It's the value of the guarded choice node
-    // with a GEQ guard, if any.
-    int min_repetition = 0;
-
-    for (int i = 0; i <= 1; i++) {
-      GuardedAlternative alternative = alternatives()->at(i);
-      ZoneList<Guard*>* guards = alternative.guards();
-      if (guards != NULL && guards->length() > 0) {
-        Guard* guard = guards->at(0);
-        if (guard->op() == Guard::GEQ) {
-          min_repetition = guard->value();
-          break;
-        }
-      }
-    }
-
-    budget = loop_node()->ComputeFirstCharacterSet(budget);
-    if (budget >= 0) {
-      ZoneList<CharacterRange>* character_set =
-          loop_node()->first_character_set();
-      if (body_can_be_zero_length() || min_repetition == 0) {
-        budget = continue_node()->ComputeFirstCharacterSet(budget);
-        if (budget < 0) return budget;
-        ZoneList<CharacterRange>* body_set =
-            continue_node()->first_character_set();
-        ZoneList<CharacterRange>* union_set =
-          new ZoneList<CharacterRange>(Max(character_set->length(),
-                                           body_set->length()));
-        CharacterRange::Merge(character_set,
-                              body_set,
-                              union_set,
-                              union_set,
-                              union_set);
-        character_set = union_set;
-      }
-      set_first_character_set(character_set);
-    }
-  }
-  return budget;
-}
-
-
-int NegativeLookaheadChoiceNode::ComputeFirstCharacterSet(int budget) {
-  budget--;
-  if (budget >= 0) {
-    GuardedAlternative successor = this->alternatives()->at(1);
-    RegExpNode* successor_node = successor.node();
-    budget = successor_node->ComputeFirstCharacterSet(budget);
-    if (budget >= 0) {
-      set_first_character_set(successor_node->first_character_set());
-    }
-  }
-  return budget;
-}
-
-
-// The first character set of an EndNode is unknowable. Just use the
-// default implementation that fails and returns all characters as possible.
-
-
-int AssertionNode::ComputeFirstCharacterSet(int budget) {
-  budget -= 1;
-  if (budget >= 0) {
-    switch (type_) {
-      case AT_END: {
-        set_first_character_set(new ZoneList<CharacterRange>(0));
-        break;
-      }
-      case AT_START:
-      case AT_BOUNDARY:
-      case AT_NON_BOUNDARY:
-      case AFTER_NEWLINE:
-      case AFTER_NONWORD_CHARACTER:
-      case AFTER_WORD_CHARACTER: {
-        ASSERT_NOT_NULL(on_success());
-        budget = on_success()->ComputeFirstCharacterSet(budget);
-        if (budget >= 0) {
-          set_first_character_set(on_success()->first_character_set());
-        }
-        break;
-      }
-    }
-  }
-  return budget;
-}
-
-
-int ActionNode::ComputeFirstCharacterSet(int budget) {
-  if (type_ == POSITIVE_SUBMATCH_SUCCESS) return kComputeFirstCharacterSetFail;
-  budget--;
-  if (budget >= 0) {
-    ASSERT_NOT_NULL(on_success());
-    budget = on_success()->ComputeFirstCharacterSet(budget);
-    if (budget >= 0) {
-      set_first_character_set(on_success()->first_character_set());
-    }
-  }
-  return budget;
-}
-
-
-int BackReferenceNode::ComputeFirstCharacterSet(int budget) {
-  // We don't know anything about the first character of a backreference
-  // at this point.
-  // The potential first characters are the first characters of the capture,
-  // and the first characters of the on_success node, depending on whether the
-  // capture can be empty and whether it is known to be participating or known
-  // not to be.
-  return kComputeFirstCharacterSetFail;
-}
+STATIC_ASSERT(BoyerMoorePositionInfo::kMapSize ==
+              RegExpMacroAssembler::kTableSize);
 
 
 void ChoiceNode::FillInBMInfo(
@@ -5814,24 +5487,33 @@
     GuardedAlternative& alt = alts->at(i);
     if (alt.guards() != NULL && alt.guards()->length() != 0) {
       bm->SetRest(offset);  // Give up trying to fill in info.
+      SaveBMInfo(bm, not_at_start, offset);
       return;
     }
     alt.node()->FillInBMInfo(offset, bm, not_at_start);
   }
+  SaveBMInfo(bm, not_at_start, offset);
 }
 
 
 void TextNode::FillInBMInfo(
-    int offset, BoyerMooreLookahead* bm, bool not_at_start) {
-  if (offset >= bm->length()) return;
+    int initial_offset, BoyerMooreLookahead* bm, bool not_at_start) {
+  if (initial_offset >= bm->length()) return;
+  int offset = initial_offset;
   int max_char = bm->max_char();
   for (int i = 0; i < elements()->length(); i++) {
-    if (offset >= bm->length()) return;
+    if (offset >= bm->length()) {
+      if (initial_offset == 0) set_bm_info(not_at_start, bm);
+      return;
+    }
     TextElement text = elements()->at(i);
     if (text.type == TextElement::ATOM) {
       RegExpAtom* atom = text.data.u_atom;
       for (int j = 0; j < atom->length(); j++, offset++) {
-        if (offset >= bm->length()) return;
+        if (offset >= bm->length()) {
+          if (initial_offset == 0) set_bm_info(not_at_start, bm);
+          return;
+        }
         uc16 character = atom->data()[j];
         if (bm->compiler()->ignore_case()) {
           unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
@@ -5858,67 +5540,23 @@
           CharacterRange& range = ranges->at(k);
           if (range.from() > max_char) continue;
           int to = Min(max_char, static_cast<int>(range.to()));
-          if (to - range.from() >= BoyerMooreLookahead::kTooManyCharacters) {
-            bm->SetAll(offset);
-            break;
-          }
-          for (int m = range.from(); m <= to; m++) {
-            bm->Set(offset, m);
-          }
+          bm->SetInterval(offset, Interval(range.from(), to));
         }
       }
       offset++;
     }
   }
-  if (offset >= bm->length()) return;
+  if (offset >= bm->length()) {
+    if (initial_offset == 0) set_bm_info(not_at_start, bm);
+    return;
+  }
   on_success()->FillInBMInfo(offset,
                              bm,
                              true);  // Not at start after a text node.
+  if (initial_offset == 0) set_bm_info(not_at_start, bm);
 }
 
 
-int TextNode::ComputeFirstCharacterSet(int budget) {
-  budget--;
-  if (budget >= 0) {
-    ASSERT_NE(0, elements()->length());
-    TextElement text = elements()->at(0);
-    if (text.type == TextElement::ATOM) {
-      RegExpAtom* atom = text.data.u_atom;
-      ASSERT_NE(0, atom->length());
-      uc16 first_char = atom->data()[0];
-      ZoneList<CharacterRange>* range = new ZoneList<CharacterRange>(1);
-      range->Add(CharacterRange(first_char, first_char));
-      set_first_character_set(range);
-    } else {
-      ASSERT(text.type == TextElement::CHAR_CLASS);
-      RegExpCharacterClass* char_class = text.data.u_char_class;
-      ZoneList<CharacterRange>* ranges = char_class->ranges();
-      // TODO(lrn): Canonicalize ranges when they are created
-      // instead of waiting until now.
-      CharacterRange::Canonicalize(ranges);
-      if (char_class->is_negated()) {
-        int length = ranges->length();
-        int new_length = length + 1;
-        if (length > 0) {
-          if (ranges->at(0).from() == 0) new_length--;
-          if (ranges->at(length - 1).to() == String::kMaxUtf16CodeUnit) {
-            new_length--;
-          }
-        }
-        ZoneList<CharacterRange>* negated_ranges =
-            new ZoneList<CharacterRange>(new_length);
-        CharacterRange::Negate(ranges, negated_ranges);
-        set_first_character_set(negated_ranges);
-      } else {
-        set_first_character_set(ranges);
-      }
-    }
-  }
-  return budget;
-}
-
-
-
 // -------------------------------------------------------------------
 // Dispatch table construction
 
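The rewritten TextNode::FillInBMInfo above fills one table entry per lookahead position with the set of code units (taken modulo 128) that can occur there: atoms contribute their individual characters, while character classes contribute whole intervals via bm->SetInterval(). The following standalone C++ sketch is illustrative only (PositionInfo and FillAtom are invented names, kMapSize mirrors BoyerMoorePositionInfo::kMapSize, and it handles literal atoms only, ignoring case folding):

#include <bitset>
#include <cstddef>
#include <string>
#include <vector>

constexpr int kMapSize = 128;     // Code units are tracked modulo 128.

struct PositionInfo {
  std::bitset<kMapSize> map;      // Which code units (mod 128) can occur here.
  bool all = false;               // Analogue of SetAll(): anything can occur.
};

// Fill positions [offset, offset + atom.size()) with the atom's characters,
// roughly the way FillInBMInfo walks the elements of a text node.
void FillAtom(std::vector<PositionInfo>* lookahead,
              std::size_t offset,
              const std::string& atom) {
  for (char c : atom) {
    if (offset >= lookahead->size()) return;  // Past the lookahead window.
    (*lookahead)[offset].map.set(
        static_cast<unsigned char>(c) & (kMapSize - 1));
    offset++;
  }
}

int main() {
  // Leave position 0 unconstrained (as an any-character loop would) and fill
  // positions 1..3 from the literal atom "foo".
  std::vector<PositionInfo> lookahead(4);
  lookahead[0].all = true;
  FillAtom(&lookahead, 1, "foo");
  return 0;
}
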
diff --git a/src/jsregexp.h b/src/jsregexp.h
index 288e995..4847739 100644
--- a/src/jsregexp.h
+++ b/src/jsregexp.h
@@ -40,6 +40,7 @@
 class RegExpMacroAssembler;
 class RegExpNode;
 class RegExpTree;
+class BoyerMooreLookahead;
 
 class RegExpImpl {
  public:
@@ -224,48 +225,6 @@
 };
 
 
-// Represents the relation of two sets.
-// Sets can be either disjoint, partially or fully overlapping, or equal.
-class SetRelation BASE_EMBEDDED {
- public:
-  // Relation is represented by a bit saying whether there are elements in
-  // one set that is not in the other, and a bit saying that there are elements
-  // that are in both sets.
-
-  // Location of an element. Corresponds to the internal areas of
-  // a Venn diagram.
-  enum {
-    kInFirst = 1 << kInsideFirst,
-    kInSecond = 1 << kInsideSecond,
-    kInBoth = 1 << kInsideBoth
-  };
-  SetRelation() : bits_(0) {}
-  ~SetRelation() {}
-  // Add the existence of objects in a particular
-  void SetElementsInFirstSet() { bits_ |= kInFirst; }
-  void SetElementsInSecondSet() { bits_ |= kInSecond; }
-  void SetElementsInBothSets() { bits_ |= kInBoth; }
-  // Check the currently known relation of the sets (common functions only,
-  // for other combinations, use value() to get the bits and check them
-  // manually).
-  // Sets are completely disjoint.
-  bool Disjoint() { return (bits_ & kInBoth) == 0; }
-  // Sets are equal.
-  bool Equals() { return (bits_ & (kInFirst | kInSecond)) == 0; }
-  // First set contains second.
-  bool Contains() { return (bits_ & kInSecond) == 0; }
-  // Second set contains first.
-  bool ContainedIn() { return (bits_ & kInFirst) == 0; }
-  bool NonTrivialIntersection() {
-    return (bits_ == (kInFirst | kInSecond | kInBoth));
-  }
-  int value() { return bits_; }
-
- private:
-  int bits_;
-};
-
-
 class CharacterRange {
  public:
   CharacterRange() : from_(0), to_(0) { }
@@ -273,7 +232,7 @@
   CharacterRange(void* null) { ASSERT_EQ(NULL, null); }  //NOLINT
   CharacterRange(uc16 from, uc16 to) : from_(from), to_(to) { }
   static void AddClassEscape(uc16 type, ZoneList<CharacterRange>* ranges);
-  static Vector<const uc16> GetWordBounds();
+  static Vector<const int> GetWordBounds();
   static inline CharacterRange Singleton(uc16 value) {
     return CharacterRange(value, value);
   }
@@ -294,7 +253,7 @@
   bool IsSingleton() { return (from_ == to_); }
   void AddCaseEquivalents(ZoneList<CharacterRange>* ranges, bool is_ascii);
   static void Split(ZoneList<CharacterRange>* base,
-                    Vector<const uc16> overlay,
+                    Vector<const int> overlay,
                     ZoneList<CharacterRange>** included,
                     ZoneList<CharacterRange>** excluded);
   // Whether a range list is in canonical form: Ranges ordered by from value,
@@ -305,28 +264,6 @@
   // adjacent ranges are merged. The resulting list may be shorter than the
   // original, but cannot be longer.
   static void Canonicalize(ZoneList<CharacterRange>* ranges);
-  // Check how the set of characters defined by a CharacterRange list relates
-  // to the set of word characters. List must be in canonical form.
-  static SetRelation WordCharacterRelation(ZoneList<CharacterRange>* ranges);
-  // Takes two character range lists (representing character sets) in canonical
-  // form and merges them.
-  // The characters that are only covered by the first set are added to
-  // first_set_only_out. the characters that are only in the second set are
-  // added to second_set_only_out, and the characters that are in both are
-  // added to both_sets_out.
-  // The pointers to first_set_only_out, second_set_only_out and both_sets_out
-  // should be to empty lists, but they need not be distinct, and may be NULL.
-  // If NULL, the characters are dropped, and if two arguments are the same
-  // pointer, the result is the union of the two sets that would be created
-  // if the pointers had been distinct.
-  // This way, the Merge function can compute all the usual set operations:
-  // union (all three out-sets are equal), intersection (only both_sets_out is
-  // non-NULL), and set difference (only first_set is non-NULL).
-  static void Merge(ZoneList<CharacterRange>* first_set,
-                    ZoneList<CharacterRange>* second_set,
-                    ZoneList<CharacterRange>* first_set_only_out,
-                    ZoneList<CharacterRange>* second_set_only_out,
-                    ZoneList<CharacterRange>* both_sets_out);
   // Negate the contents of a character range in canonical form.
   static void Negate(ZoneList<CharacterRange>* src,
                      ZoneList<CharacterRange>* dst);
@@ -421,90 +358,6 @@
 };
 
 
-// Improve the speed that we scan for an initial point where a non-anchored
-// regexp can match by using a Boyer-Moore-like table. This is done by
-// identifying non-greedy non-capturing loops in the nodes that eat any
-// character one at a time.  For example in the middle of the regexp
-// /foo[\s\S]*?bar/ we find such a loop.  There is also such a loop implicitly
-// inserted at the start of any non-anchored regexp.
-//
-// When we have found such a loop we look ahead in the nodes to find the set of
-// characters that can come at given distances. For example for the regexp
-// /.?foo/ we know that there are at least 3 characters ahead of us, and the
-// sets of characters that can occur are [any, [f, o], [o]]. We find a range in
-// the lookahead info where the set of characters is reasonably constrained. In
-// our example this is from index 1 to 2 (0 is not constrained). We can now
-// look 3 characters ahead and if we don't find one of [f, o] (the union of
-// [f, o] and [o]) then we can skip forwards by the range size (in this case 2).
-//
-// For Unicode input strings we do the same, but modulo 128.
-//
-// We also look at the first string fed to the regexp and use that to get a hint
-// of the character frequencies in the inputs. This affects the assessment of
-// whether the set of characters is 'reasonably constrained'.
-//
-// We also have another lookahead mechanism (called quick check in the code),
-// which uses a wide load of multiple characters followed by a mask and compare
-// to determine whether a match is possible at this point.
-class BoyerMooreLookahead {
- public:
-  BoyerMooreLookahead(int length, int map_length, RegExpCompiler* compiler);
-
-  int length() { return length_; }
-  int max_char() { return max_char_; }
-  RegExpCompiler* compiler() { return compiler_; }
-
-  static const int kTooManyCharacters = 32;
-
-  int Count(int map_number) {
-    ZoneList<bool>* map = bitmaps_->at(map_number);
-    if (map == NULL) return map_length_;
-    int count = 0;
-    for (int i = 0; i < map_length_; i++) {
-      if (map->at(i)) count++;
-    }
-    return count;
-  }
-
-  void Set(int map_number, int character) {
-    if (character > max_char_) return;
-    ZoneList<bool>* map = bitmaps_->at(map_number);
-    if (map == NULL) return;
-    map->at(character & (map_length_ - 1)) = true;
-  }
-
-  void SetAll(int map_number) {
-    bitmaps_->at(map_number) = NULL;
-  }
-
-  void SetRest(int from_map) {
-    for (int i = from_map; i < length_; i++) SetAll(i);
-  }
-  bool EmitSkipInstructions(RegExpMacroAssembler* masm);
-
- private:
-  // This is the value obtained by EatsAtLeast.  If we do not have at least this
-  // many characters left in the sample string then the match is bound to fail.
-  // Therefore it is OK to read a character this far ahead of the current match
-  // point.
-  int length_;
-  // We conservatively consider all character values modulo this length.  For
-  // ASCII there is no loss of precision, since this has a value of 128.
-  int map_length_;
-  RegExpCompiler* compiler_;
-  // 0x7f for ASCII, 0xffff for UTF-16.
-  int max_char_;
-  ZoneList<ZoneList<bool>*>* bitmaps_;
-
-  int GetSkipTable(int min_lookahead,
-                   int max_lookahead,
-                   Handle<ByteArray> boolean_skip_table);
-  bool FindWorthwhileInterval(int* from, int* to);
-  int FindBestInterval(
-    int max_number_of_chars, int old_biggest_points, int* from, int* to);
-};
-
-
 #define FOR_EACH_NODE_TYPE(VISIT)                                    \
   VISIT(End)                                                         \
   VISIT(Action)                                                      \
@@ -687,7 +540,9 @@
 
 class RegExpNode: public ZoneObject {
  public:
-  RegExpNode() : first_character_set_(NULL), trace_count_(0) { }
+  RegExpNode() : first_character_set_(NULL), trace_count_(0) {
+    bm_info_[0] = bm_info_[1] = NULL;
+  }
   virtual ~RegExpNode();
   virtual void Accept(NodeVisitor* visitor) = 0;
   // Generates a goto to this node or actually generates the code at this point.
@@ -731,11 +586,18 @@
   // Collects information on the possible code units (mod 128) that can match if
   // we look forward.  This is used for a Boyer-Moore-like string searching
   // implementation.  TODO(erikcorry):  This should share more code with
-  // EatsAtLeast, GetQuickCheckDetails and ComputeFirstCharacterSet.
+  // EatsAtLeast and GetQuickCheckDetails.
   virtual void FillInBMInfo(
       int offset, BoyerMooreLookahead* bm, bool not_at_start) {
     UNREACHABLE();
   }
+  // We want to avoid recalculating the lookahead info, so we store it on the
+  // node.  Only info that is for this node is stored.  The info is for this
+  // node exactly when offset == 0, because then it was calculated relative
+  // to this node.
+  void SaveBMInfo(BoyerMooreLookahead* bm, bool not_at_start, int offset) {
+    if (offset == 0) set_bm_info(not_at_start, bm);
+  }
 
   Label* label() { return &label_; }
   // If non-generic code is generated for a node (i.e. the node is not at the
@@ -759,17 +621,6 @@
   SiblingList* siblings() { return &siblings_; }
   void set_siblings(SiblingList* other) { siblings_ = *other; }
 
-  // Return the set of possible next characters recognized by the regexp
-  // (or a safe subset, potentially the set of all characters).
-  ZoneList<CharacterRange>* FirstCharacterSet();
-
-  // Compute (if possible within the budget of traversed nodes) the
-  // possible first characters of the input matched by this node and
-  // its continuation. Returns the remaining budget after the computation.
-  // If the budget is spent, the result is negative, and the cached
-  // first_character_set_ value isn't set.
-  virtual int ComputeFirstCharacterSet(int budget);
-
   // Get and set the cached first character set value.
   ZoneList<CharacterRange>* first_character_set() {
     return first_character_set_;
@@ -777,6 +628,9 @@
   void set_first_character_set(ZoneList<CharacterRange>* character_set) {
     first_character_set_ = character_set;
   }
+  BoyerMooreLookahead* bm_info(bool not_at_start) {
+    return bm_info_[not_at_start ? 1 : 0];
+  }
 
  protected:
   enum LimitResult { DONE, CONTINUE };
@@ -801,6 +655,10 @@
   // processed before it is on a usable state.
   virtual RegExpNode* Clone() = 0;
 
+  void set_bm_info(bool not_at_start, BoyerMooreLookahead* bm) {
+    bm_info_[not_at_start ? 1 : 0] = bm;
+  }
+
  private:
   static const int kFirstCharBudget = 10;
   Label label_;
@@ -813,6 +671,7 @@
   // a trace, in which case it is generic and can be reused by flushing the
   // deferred operations in the current trace and generating a goto.
   int trace_count_;
+  BoyerMooreLookahead* bm_info_[2];
 };
 
 
@@ -833,8 +692,8 @@
     return (from_ <= value) && (value <= to_);
   }
   bool is_empty() { return from_ == kNone; }
-  int from() { return from_; }
-  int to() { return to_; }
+  int from() const { return from_; }
+  int to() const { return to_; }
   static Interval Empty() { return Interval(); }
   static const int kNone = -1;
  private:
@@ -852,6 +711,7 @@
   virtual void FillInBMInfo(
       int offset, BoyerMooreLookahead* bm, bool not_at_start) {
     on_success_->FillInBMInfo(offset, bm, not_at_start);
+    if (offset == 0) set_bm_info(not_at_start, bm);
   }
  private:
   RegExpNode* on_success_;
@@ -905,7 +765,6 @@
   // TODO(erikcorry): We should allow some action nodes in greedy loops.
   virtual int GreedyLoopTextLength() { return kNodeIsTooComplexForGreedyLoops; }
   virtual ActionNode* Clone() { return new ActionNode(*this); }
-  virtual int ComputeFirstCharacterSet(int budget);
 
  private:
   union {
@@ -978,7 +837,6 @@
     return result;
   }
   void CalculateOffsets();
-  virtual int ComputeFirstCharacterSet(int budget);
 
  private:
   enum TextEmitPassType {
@@ -1009,12 +867,7 @@
     AT_START,
     AT_BOUNDARY,
     AT_NON_BOUNDARY,
-    AFTER_NEWLINE,
-    // Types not directly expressible in regexp syntax.
-    // Used for modifying a boundary node if its following character is
-    // known to be word and/or non-word.
-    AFTER_NONWORD_CHARACTER,
-    AFTER_WORD_CHARACTER
+    AFTER_NEWLINE
   };
   static AssertionNode* AtEnd(RegExpNode* on_success) {
     return new AssertionNode(AT_END, on_success);
@@ -1042,12 +895,16 @@
                                     bool not_at_start);
   virtual void FillInBMInfo(
       int offset, BoyerMooreLookahead* bm, bool not_at_start);
-  virtual int ComputeFirstCharacterSet(int budget);
   virtual AssertionNode* Clone() { return new AssertionNode(*this); }
   AssertionNodeType type() { return type_; }
   void set_type(AssertionNodeType type) { type_ = type; }
 
  private:
+  void EmitBoundaryCheck(RegExpCompiler* compiler, Trace* trace);
+  enum IfPrevious { kIsNonWord, kIsWord };
+  void BacktrackIfPrevious(RegExpCompiler* compiler,
+                           Trace* trace,
+                           IfPrevious backtrack_if_previous);
   AssertionNode(AssertionNodeType t, RegExpNode* on_success)
       : SeqRegExpNode(on_success), type_(t) { }
   AssertionNodeType type_;
@@ -1076,13 +933,8 @@
     return;
   }
   virtual void FillInBMInfo(
-      int offset, BoyerMooreLookahead* bm, bool not_at_start) {
-    // Working out the set of characters that a backreference can match is too
-    // hard, so we just say that any character can match.
-    bm->SetRest(offset);
-  }
+      int offset, BoyerMooreLookahead* bm, bool not_at_start);
   virtual BackReferenceNode* Clone() { return new BackReferenceNode(*this); }
-  virtual int ComputeFirstCharacterSet(int budget);
 
  private:
   int start_reg_;
@@ -1249,6 +1101,7 @@
   virtual void FillInBMInfo(
       int offset, BoyerMooreLookahead* bm, bool not_at_start) {
     alternatives_->at(1).node()->FillInBMInfo(offset, bm, not_at_start);
+    if (offset == 0) set_bm_info(not_at_start, bm);
   }
   // For a negative lookahead we don't emit the quick check for the
   // alternative that is expected to fail.  This is because quick check code
@@ -1256,7 +1109,6 @@
   // characters, but on a negative lookahead the negative branch did not take
   // part in that calculation (EatsAtLeast) so the assumptions don't hold.
   virtual bool try_to_emit_quick_check_for_alternative(int i) { return i != 0; }
-  virtual int ComputeFirstCharacterSet(int budget);
 };
 
 
@@ -1279,7 +1131,6 @@
                                     bool not_at_start);
   virtual void FillInBMInfo(
       int offset, BoyerMooreLookahead* bm, bool not_at_start);
-  virtual int ComputeFirstCharacterSet(int budget);
   virtual LoopChoiceNode* Clone() { return new LoopChoiceNode(*this); }
   RegExpNode* loop_node() { return loop_node_; }
   RegExpNode* continue_node() { return continue_node_; }
@@ -1300,6 +1151,146 @@
 };
 
 
+// Improve the speed that we scan for an initial point where a non-anchored
+// regexp can match by using a Boyer-Moore-like table. This is done by
+// identifying non-greedy non-capturing loops in the nodes that eat any
+// character one at a time.  For example in the middle of the regexp
+// /foo[\s\S]*?bar/ we find such a loop.  There is also such a loop implicitly
+// inserted at the start of any non-anchored regexp.
+//
+// When we have found such a loop we look ahead in the nodes to find the set of
+// characters that can come at given distances. For example for the regexp
+// /.?foo/ we know that there are at least 3 characters ahead of us, and the
+// sets of characters that can occur are [any, [f, o], [o]]. We find a range in
+// the lookahead info where the set of characters is reasonably constrained. In
+// our example this is from index 1 to 2 (0 is not constrained). We can now
+// look 3 characters ahead and if we don't find one of [f, o] (the union of
+// [f, o] and [o]) then we can skip forwards by the range size (in this case 2).
+//
+// For Unicode input strings we do the same, but modulo 128.
+//
+// We also look at the first string fed to the regexp and use that to get a hint
+// of the character frequencies in the inputs. This affects the assessment of
+// whether the set of characters is 'reasonably constrained'.
+//
+// We also have another lookahead mechanism (called quick check in the code),
+// which uses a wide load of multiple characters followed by a mask and compare
+// to determine whether a match is possible at this point.
+enum ContainedInLattice {
+  kNotYet = 0,
+  kLatticeIn = 1,
+  kLatticeOut = 2,
+  kLatticeUnknown = 3  // Can also mean both in and out.
+};
+
+
+inline ContainedInLattice Combine(ContainedInLattice a, ContainedInLattice b) {
+  return static_cast<ContainedInLattice>(a | b);
+}
+
+
+ContainedInLattice AddRange(ContainedInLattice a,
+                            const int* ranges,
+                            int ranges_size,
+                            Interval new_range);
+
+
+class BoyerMoorePositionInfo : public ZoneObject {
+ public:
+  BoyerMoorePositionInfo()
+      : map_(new ZoneList<bool>(kMapSize)),
+        map_count_(0),
+        w_(kNotYet),
+        s_(kNotYet),
+        d_(kNotYet),
+        surrogate_(kNotYet) {
+    for (int i = 0; i < kMapSize; i++) {
+      map_->Add(false);
+    }
+  }
+
+  bool& at(int i) { return map_->at(i); }
+
+  static const int kMapSize = 128;
+  static const int kMask = kMapSize - 1;
+
+  int map_count() const { return map_count_; }
+
+  void Set(int character);
+  void SetInterval(const Interval& interval);
+  void SetAll();
+  bool is_non_word() { return w_ == kLatticeOut; }
+  bool is_word() { return w_ == kLatticeIn; }
+
+ private:
+  ZoneList<bool>* map_;
+  int map_count_;  // Number of set bits in the map.
+  ContainedInLattice w_;  // The \w character class.
+  ContainedInLattice s_;  // The \s character class.
+  ContainedInLattice d_;  // The \d character class.
+  ContainedInLattice surrogate_;  // Surrogate UTF-16 code units.
+};
+
+
+class BoyerMooreLookahead : public ZoneObject {
+ public:
+  BoyerMooreLookahead(int length, RegExpCompiler* compiler);
+
+  int length() { return length_; }
+  int max_char() { return max_char_; }
+  RegExpCompiler* compiler() { return compiler_; }
+
+  int Count(int map_number) {
+    return bitmaps_->at(map_number)->map_count();
+  }
+
+  BoyerMoorePositionInfo* at(int i) { return bitmaps_->at(i); }
+
+  void Set(int map_number, int character) {
+    if (character > max_char_) return;
+    BoyerMoorePositionInfo* info = bitmaps_->at(map_number);
+    info->Set(character);
+  }
+
+  void SetInterval(int map_number, const Interval& interval) {
+    if (interval.from() > max_char_) return;
+    BoyerMoorePositionInfo* info = bitmaps_->at(map_number);
+    if (interval.to() > max_char_) {
+      info->SetInterval(Interval(interval.from(), max_char_));
+    } else {
+      info->SetInterval(interval);
+    }
+  }
+
+  void SetAll(int map_number) {
+    bitmaps_->at(map_number)->SetAll();
+  }
+
+  void SetRest(int from_map) {
+    for (int i = from_map; i < length_; i++) SetAll(i);
+  }
+  bool EmitSkipInstructions(RegExpMacroAssembler* masm);
+
+ private:
+  // This is the value obtained by EatsAtLeast.  If we do not have at least this
+  // many characters left in the sample string then the match is bound to fail.
+  // Therefore it is OK to read a character this far ahead of the current match
+  // point.
+  int length_;
+  RegExpCompiler* compiler_;
+  // 0x7f for ASCII, 0xffff for UTF-16.
+  int max_char_;
+  ZoneList<BoyerMoorePositionInfo*>* bitmaps_;
+
+  int GetSkipTable(int min_lookahead,
+                   int max_lookahead,
+                   Handle<ByteArray> boolean_skip_table);
+  bool FindWorthwhileInterval(int* from, int* to);
+  int FindBestInterval(
+    int max_number_of_chars, int old_biggest_points, int* from, int* to);
+};
+
+
 // There are many ways to generate code for a node.  This class encapsulates
 // the current way we should be generating.  In other words it encapsulates
 // the current state of the code generator.  The effect of this is that we
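
The block comment above describes how those per-position tables get used: choose a stretch of positions [from, to] whose character sets are reasonably constrained, take the union of those sets, probe the subject character at offset 'to' from the current candidate position, and if it is outside the union then no match can start in the next (to - from + 1) positions. A minimal self-contained C++ sketch of that scan (illustrative only; CanSkip and FindCandidate are invented names, and the real class instead builds a boolean skip table and emits machine code for the loop via GetSkipTable and EmitSkipInstructions):

#include <bitset>
#include <cstddef>
#include <string>

// True if 'c' cannot occur at any position in [from, to], i.e. it lies
// outside the union of those positions' character sets (tracked mod 128).
bool CanSkip(const std::bitset<128>& allowed, char c) {
  return !allowed.test(static_cast<unsigned char>(c) & 127);
}

// Advance in steps of (to - from + 1): if the character at offset 'to' from
// the current candidate is outside the union, no match can start anywhere in
// the next (to - from + 1) positions.
std::size_t FindCandidate(const std::string& subject,
                          const std::bitset<128>& allowed,
                          std::size_t from, std::size_t to) {
  std::size_t pos = 0;
  const std::size_t skip = to - from + 1;
  while (pos + to < subject.size() && CanSkip(allowed, subject[pos + to])) {
    pos += skip;
  }
  return pos;  // The regular matcher still has to verify a match from here.
}
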
diff --git a/src/macros.py b/src/macros.py
index 93287ae..699b368 100644
--- a/src/macros.py
+++ b/src/macros.py
@@ -204,6 +204,15 @@
 const CAPTURE0 = 3;
 const CAPTURE1 = 4;
 
+# For the regexp capture override array.  This has the same
+# format as the arguments to a function called from
+# String.prototype.replace.
+macro OVERRIDE_MATCH(override) = ((override)[0]);
+macro OVERRIDE_POS(override) = ((override)[(override).length - 2]);
+macro OVERRIDE_SUBJECT(override) = ((override)[(override).length - 1]);
+# 1-based, so an index of 1 returns the first capture.
+macro OVERRIDE_CAPTURE(override, index) = ((override)[(index)]);
+
 # PropertyDescriptor return value indices - must match
 # PropertyDescriptorIndices in runtime.cc.
 const IS_ACCESSOR_INDEX = 0;
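
The OVERRIDE_* macros above assume an override array laid out like the argument list of a String.prototype.replace callback: element 0 is the whole match, elements 1..n are the captures (hence the 1-based OVERRIDE_CAPTURE), and the last two elements are the match position and the subject string. A small C++17 sketch of the same layout (illustrative only; Value, Override and the helper functions are invented stand-ins for the generated macros):

#include <cassert>
#include <cstddef>
#include <string>
#include <variant>
#include <vector>

// [ match, capture_1, ..., capture_n, position, subject ]
using Value = std::variant<int, std::string>;
using Override = std::vector<Value>;

const std::string& Match(const Override& o) {
  return std::get<std::string>(o[0]);
}
const std::string& Capture(const Override& o, std::size_t i) {  // 1-based.
  return std::get<std::string>(o[i]);
}
int Pos(const Override& o) { return std::get<int>(o[o.size() - 2]); }
const std::string& Subject(const Override& o) {
  return std::get<std::string>(o.back());
}

int main() {
  // "abc".replace(/(b)(c)/, fn) calls fn("bc", "b", "c", 1, "abc"),
  // so the corresponding override array is:
  Override o = {std::string("bc"), std::string("b"), std::string("c"),
                1, std::string("abc")};
  assert(Match(o) == "bc");      // OVERRIDE_MATCH
  assert(Capture(o, 2) == "c");  // OVERRIDE_CAPTURE(o, 2)
  assert(Pos(o) == 1);           // OVERRIDE_POS
  assert(Subject(o) == "abc");   // OVERRIDE_SUBJECT
  return 0;
}
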
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 346b9a1..0f6a566 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -1525,12 +1525,6 @@
                              JSFunction::kCodeEntryOffset + kPointerSize),
         HeapObject::RawField(object,
                              JSFunction::kNonWeakFieldsEndOffset));
-
-    // Don't visit the next function list field as it is a weak reference.
-    Object** next_function =
-        HeapObject::RawField(object, JSFunction::kNextFunctionLinkOffset);
-    heap->mark_compact_collector()->RecordSlot(
-        next_function, next_function, *next_function);
   }
 
   static inline void VisitJSRegExpFields(Map* map,
diff --git a/src/mark-compact.h b/src/mark-compact.h
index 66ffd19..f8488bb 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -544,6 +544,8 @@
 
   void ClearMarkbits();
 
+  bool is_compacting() const { return compacting_; }
+
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 34bdef0..c56188b 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -4555,9 +4555,10 @@
         __ sw(a2, FieldMemOperand(result, total_offset + 4));
       }
     } else if (elements->IsFixedArray()) {
+      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
       for (int i = 0; i < elements_length; i++) {
         int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
-        Handle<Object> value = JSObject::GetElement(object, i);
+        Handle<Object> value(fast_elements->get(i));
         if (value->IsJSObject()) {
           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
           __ Addu(a2, result, Operand(*offset));
diff --git a/src/mips/regexp-macro-assembler-mips.cc b/src/mips/regexp-macro-assembler-mips.cc
index 6bcb301..c48bcc4 100644
--- a/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/mips/regexp-macro-assembler-mips.cc
@@ -480,7 +480,10 @@
     uc16 minus,
     uc16 mask,
     Label* on_not_equal) {
-  UNIMPLEMENTED_MIPS();
+  ASSERT(minus < String::kMaxUtf16CodeUnit);
+  __ Subu(a0, current_character(), Operand(minus));
+  __ And(a0, a0, Operand(mask));
+  BranchOrBacktrack(on_not_equal, ne, a0, Operand(c));
 }
 
 
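The newly implemented CheckNotCharacterAfterMinusAnd above subtracts 'minus' from the current character, masks the result, and backtracks unless it equals the expected value c. A hedged scalar sketch of the emitted check (plain C++, not V8 code; the function name is invented):

#include <cstdint>

// True if the current character passes the check, i.e. the generated code
// would NOT branch to on_not_equal.
bool CharacterAfterMinusAndEquals(uint16_t current,
                                  uint16_t minus,
                                  uint16_t mask,
                                  uint16_t expected) {
  return (static_cast<uint16_t>(current - minus) & mask) == expected;
}

A single subtract-and-mask like this lets the generated matcher fold several candidate characters into one comparison and branch.
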
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index c683468..dc2a357 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -565,6 +565,8 @@
   __ Push(scratch, receiver, holder);
   __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
   __ push(scratch);
+  __ li(scratch, Operand(ExternalReference::isolate_address()));
+  __ push(scratch);
 }
 
 
@@ -579,7 +581,7 @@
   ExternalReference ref =
       ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
           masm->isolate());
-  __ PrepareCEntryArgs(5);
+  __ PrepareCEntryArgs(6);
   __ PrepareCEntryFunction(ref);
 
   CEntryStub stub(1);
@@ -587,10 +589,10 @@
 }
 
 
-static const int kFastApiCallArguments = 3;
+static const int kFastApiCallArguments = 4;
 
 
-// Reserves space for the extra arguments to FastHandleApiCall in the
+// Reserves space for the extra arguments to the API function in the
 // caller's frame.
 //
 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
@@ -616,7 +618,8 @@
   //  -- sp[0]              : holder (set by CheckPrototypes)
   //  -- sp[4]              : callee JS function
   //  -- sp[8]              : call data
-  //  -- sp[12]             : last JS argument
+  //  -- sp[12]             : isolate
+  //  -- sp[16]             : last JS argument
   //  -- ...
   //  -- sp[(argc + 3) * 4] : first JS argument
   //  -- sp[(argc + 4) * 4] : receiver
@@ -626,7 +629,7 @@
   __ LoadHeapObject(t1, function);
   __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
 
-  // Pass the additional arguments FastHandleApiCall expects.
+  // Pass the additional arguments.
   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
   Handle<Object> call_data(api_call_info->data());
   if (masm->isolate()->heap()->InNewSpace(*call_data)) {
@@ -636,14 +639,17 @@
     __ li(t2, call_data);
   }
 
-  // Store JS function and call data.
+  __ li(t3, Operand(ExternalReference::isolate_address()));
+  // Store JS function, call data and isolate.
   __ sw(t1, MemOperand(sp, 1 * kPointerSize));
   __ sw(t2, MemOperand(sp, 2 * kPointerSize));
+  __ sw(t3, MemOperand(sp, 3 * kPointerSize));
 
-  // a2 points to call data as expected by Arguments
-  // (refer to layout above).
-  __ Addu(a2, sp, Operand(2 * kPointerSize));
+  // Prepare arguments.
+  __ Addu(a2, sp, Operand(3 * kPointerSize));
 
+  // Allocate the v8::Arguments structure in the arguments' space since
+  // it's not controlled by GC.
   const int kApiStackSpace = 4;
 
   FrameScope frame_scope(masm, StackFrame::MANUAL);
@@ -658,9 +664,9 @@
   // Arguments is built at sp + 1 (sp is a reserved spot for ra).
   __ Addu(a1, sp, kPointerSize);
 
-  // v8::Arguments::implicit_args = data
+  // v8::Arguments::implicit_args_
   __ sw(a2, MemOperand(a1, 0 * kPointerSize));
-  // v8::Arguments::values = last argument
+  // v8::Arguments::values_
   __ Addu(t0, a2, Operand(argc * kPointerSize));
   __ sw(t0, MemOperand(a1, 1 * kPointerSize));
   // v8::Arguments::length_ = argc
@@ -838,7 +844,7 @@
           ExternalReference(
               IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
               masm->isolate()),
-          5);
+          6);
     // Restore the name_ register.
     __ pop(name_);
     // Leave the internal frame.
@@ -1207,7 +1213,13 @@
   } else {
     __ li(scratch3, Handle<Object>(callback->data()));
   }
-  __ Push(reg, scratch3, name_reg);
+  __ Subu(sp, sp, 4 * kPointerSize);
+  __ sw(reg, MemOperand(sp, 3 * kPointerSize));
+  __ sw(scratch3, MemOperand(sp, 2 * kPointerSize));
+  __ li(scratch3, Operand(ExternalReference::isolate_address()));
+  __ sw(scratch3, MemOperand(sp, 1 * kPointerSize));
+  __ sw(name_reg, MemOperand(sp, 0 * kPointerSize));
+
   __ mov(a2, scratch2);  // Saved in case scratch2 == a1.
   __ mov(a1, sp);  // a1 (first argument - see note below) = Handle<String>
 
@@ -1226,7 +1238,7 @@
   // a2 (second argument - see note above) = AccessorInfo&
   __ Addu(a2, sp, kPointerSize);
 
-  const int kStackUnwindSpace = 4;
+  const int kStackUnwindSpace = 5;
   Address getter_address = v8::ToCData<Address>(callback->getter());
   ApiFunction fun(getter_address);
   ExternalReference ref =
@@ -1342,24 +1354,17 @@
       // Important invariant in CALLBACKS case: the code above must be
       // structured to never clobber |receiver| register.
       __ li(scratch2, callback);
-      // holder_reg is either receiver or scratch1.
-      if (!receiver.is(holder_reg)) {
-        ASSERT(scratch1.is(holder_reg));
-        __ Push(receiver, holder_reg);
-        __ lw(scratch3,
-              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
-        __ Push(scratch3, scratch2, name_reg);
-      } else {
-        __ push(receiver);
-        __ lw(scratch3,
-              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
-        __ Push(holder_reg, scratch3, scratch2, name_reg);
-      }
+
+      __ Push(receiver, holder_reg);
+      __ lw(scratch3,
+            FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
+      __ li(scratch1, Operand(ExternalReference::isolate_address()));
+      __ Push(scratch3, scratch1, scratch2, name_reg);
 
       ExternalReference ref =
           ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                             masm()->isolate());
-      __ TailCallExternalReference(ref, 5, 1);
+      __ TailCallExternalReference(ref, 6, 1);
     }
   } else {  // !compile_followup_inline
     // Call the runtime system to load the interceptor.
@@ -1372,7 +1377,7 @@
 
     ExternalReference ref = ExternalReference(
         IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
-    __ TailCallExternalReference(ref, 5, 1);
+    __ TailCallExternalReference(ref, 6, 1);
   }
 }
 
@@ -3373,6 +3378,45 @@
 }
 
 
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+                                Register key,
+                                Register scratch0,
+                                Register scratch1,
+                                FPURegister double_scratch0,
+                                Label* fail) {
+  if (CpuFeatures::IsSupported(FPU)) {
+    CpuFeatures::Scope scope(FPU);
+    Label key_ok;
+    // Check for a smi or a smi inside a heap number.  We convert the heap
+    // number and check that the conversion is exact and that the result
+    // fits into the smi range.
+    __ JumpIfSmi(key, &key_ok);
+    __ CheckMap(key,
+                scratch0,
+                Heap::kHeapNumberMapRootIndex,
+                fail,
+                DONT_DO_SMI_CHECK);
+    __ ldc1(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
+    __ EmitFPUTruncate(kRoundToZero,
+                       double_scratch0,
+                       double_scratch0,
+                       scratch0,
+                       scratch1,
+                       kCheckForInexactConversion);
+
+    __ Branch(fail, ne, scratch1, Operand(zero_reg));
+
+    __ mfc1(scratch0, double_scratch0);
+    __ SmiTagCheckOverflow(key, scratch0, scratch1);
+    __ BranchOnOverflow(fail, scratch1);
+    __ bind(&key_ok);
+  } else {
+    // Check that the key is a smi.
+    __ JumpIfNotSmi(key, fail);
+  }
+}
+
+
 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
     MacroAssembler* masm,
     ElementsKind elements_kind) {
@@ -3389,8 +3433,8 @@
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
 
   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   // a3: elements array
@@ -3728,8 +3772,8 @@
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-    // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
 
   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
 
@@ -4108,9 +4152,8 @@
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(a0, &miss_force_generic, at, USE_DELAY_SLOT);
-  // The delay slot can be safely used here, a1 is an object pointer.
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);
 
   // Get the elements array.
   __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
@@ -4160,8 +4203,8 @@
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
 
   // Get the elements array.
   __ lw(elements_reg,
@@ -4235,8 +4278,8 @@
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
 
   if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
     __ JumpIfNotSmi(value_reg, &transition_elements_kind);
@@ -4402,7 +4445,9 @@
 
   // This stub is meant to be tail-jumped to, the receiver must already
   // have been verified by the caller to not be a smi.
-  __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+  // Check that the key is a smi or a heap number convertible to a smi.
+  GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
 
   __ lw(elements_reg,
          FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
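
GenerateSmiKeyCheck, added above and now used by the keyed element stubs in this file, accepts a key that is either already a smi or a heap number whose value converts exactly to a smi: the FPU truncates toward zero, an inexact conversion fails, and smi-tagging the result must not overflow. A plain C++ sketch of that acceptance rule (illustrative only; KeyToSmi is an invented name, and 31-bit smis are assumed, as on 32-bit MIPS):

#include <cmath>
#include <cstdint>
#include <optional>

// Assume 31-bit smis, as on 32-bit MIPS.
constexpr int32_t kSmiMax = (1 << 30) - 1;
constexpr int32_t kSmiMin = -(1 << 30);

std::optional<int32_t> KeyToSmi(double key) {
  double truncated = std::trunc(key);         // kRoundToZero
  if (truncated != key) return std::nullopt;  // Inexact conversion: fail.
  if (truncated < kSmiMin || truncated > kSmiMax) {
    return std::nullopt;                      // Does not fit in a smi.
  }
  return static_cast<int32_t>(truncated);
}
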
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 8eefb23..cd2ccf8 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -135,6 +135,9 @@
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
       JSObject::cast(this)->JSObjectVerify();
       break;
+    case JS_MODULE_TYPE:
+      JSModule::cast(this)->JSModuleVerify();
+      break;
     case JS_VALUE_TYPE:
       JSValue::cast(this)->JSValueVerify();
       break;
@@ -366,6 +369,15 @@
 }
 
 
+void JSModule::JSModuleVerify() {
+  Object* v = context();
+  if (v->IsHeapObject()) {
+    VerifyHeapPointer(v);
+  }
+  CHECK(v->IsUndefined() || v->IsModuleContext());
+}
+
+
 void JSValue::JSValueVerify() {
   Object* v = value();
   if (v->IsHeapObject()) {
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 49c8db8..677d567 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -581,7 +581,8 @@
             map == heap->catch_context_map() ||
             map == heap->with_context_map() ||
             map == heap->global_context_map() ||
-            map == heap->block_context_map());
+            map == heap->block_context_map() ||
+            map == heap->module_context_map());
   }
   return false;
 }
@@ -594,6 +595,13 @@
 }
 
 
+bool Object::IsModuleContext() {
+  return Object::IsHeapObject() &&
+      HeapObject::cast(this)->map() ==
+      HeapObject::cast(this)->GetHeap()->module_context_map();
+}
+
+
 bool Object::IsScopeInfo() {
   return Object::IsHeapObject() &&
       HeapObject::cast(this)->map() ==
@@ -613,6 +621,7 @@
 TYPE_CHECKER(Oddball, ODDBALL_TYPE)
 TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
 TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
+TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
 TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
 TYPE_CHECKER(JSDate, JS_DATE_TYPE)
 TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
@@ -1436,6 +1445,8 @@
   // field operations considerably on average.
   if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
   switch (type) {
+    case JS_MODULE_TYPE:
+      return JSModule::kSize;
     case JS_GLOBAL_PROXY_TYPE:
       return JSGlobalProxy::kSize;
     case JS_GLOBAL_OBJECT_TYPE:
@@ -1922,15 +1933,15 @@
 }
 
 
-Smi* DescriptorArray::GetDetails(int descriptor_number) {
+PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
   ASSERT(descriptor_number < number_of_descriptors());
-  return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
+  Object* details = GetContentArray()->get(ToDetailsIndex(descriptor_number));
+  return PropertyDetails(Smi::cast(details));
 }
 
 
 PropertyType DescriptorArray::GetType(int descriptor_number) {
-  ASSERT(descriptor_number < number_of_descriptors());
-  return PropertyDetails(GetDetails(descriptor_number)).type();
+  return GetDetails(descriptor_number).type();
 }
 
 
@@ -1993,15 +2004,10 @@
 }
 
 
-bool DescriptorArray::IsDontEnum(int descriptor_number) {
-  return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
-}
-
-
 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
   desc->Init(GetKey(descriptor_number),
              GetValue(descriptor_number),
-             PropertyDetails(GetDetails(descriptor_number)));
+             GetDetails(descriptor_number));
 }
 
 
@@ -4078,6 +4084,16 @@
 }
 
 
+ACCESSORS(JSModule, context, Object, kContextOffset)
+
+
+JSModule* JSModule::cast(Object* obj) {
+  ASSERT(obj->IsJSModule());
+  ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
+  return reinterpret_cast<JSModule*>(obj);
+}
+
+
 ACCESSORS(JSValue, value, Object, kValueOffset)
 
 
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index 2353a95..7d6ef67 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -135,6 +135,9 @@
     case ODDBALL_TYPE:
       Oddball::cast(this)->to_string()->Print(out);
       break;
+    case JS_MODULE_TYPE:
+      JSModule::cast(this)->JSModulePrint(out);
+      break;
     case JS_FUNCTION_TYPE:
       JSFunction::cast(this)->JSFunctionPrint(out);
       break;
@@ -152,7 +155,7 @@
       JSValue::cast(this)->value()->Print(out);
       break;
     case JS_DATE_TYPE:
-      JSDate::cast(this)->value()->Print(out);
+      JSDate::cast(this)->JSDatePrint(out);
       break;
     case CODE_TYPE:
       Code::cast(this)->CodePrint(out);
@@ -437,6 +440,19 @@
 }
 
 
+void JSModule::JSModulePrint(FILE* out) {
+  HeapObject::PrintHeader(out, "JSModule");
+  PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+  PrintF(out, " - context = ");
+  context()->Print(out);
+  PrintElementsKind(out, this->map()->elements_kind());
+  PrintF(out, " {\n");
+  PrintProperties(out);
+  PrintElements(out);
+  PrintF(out, " }\n");
+}
+
+
 static const char* TypeToString(InstanceType type) {
   switch (type) {
     case INVALID_TYPE: return "INVALID";
@@ -483,6 +499,7 @@
     case ODDBALL_TYPE: return "ODDBALL";
     case JS_GLOBAL_PROPERTY_CELL_TYPE: return "JS_GLOBAL_PROPERTY_CELL";
     case SHARED_FUNCTION_INFO_TYPE: return "SHARED_FUNCTION_INFO";
+    case JS_MODULE_TYPE: return "JS_MODULE";
     case JS_FUNCTION_TYPE: return "JS_FUNCTION";
     case CODE_TYPE: return "CODE";
     case JS_ARRAY_TYPE: return "JS_ARRAY";
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index 627d1bc..8ba92f7 100644
--- a/src/objects-visiting-inl.h
+++ b/src/objects-visiting-inl.h
@@ -72,9 +72,7 @@
 
   table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
 
-  table_.Register(kVisitJSFunction,
-                  &JSObjectVisitor::
-                      template VisitSpecialized<JSFunction::kSize>);
+  table_.Register(kVisitJSFunction, &VisitJSFunction);
 
   table_.Register(kVisitFreeSpace, &VisitFreeSpace);
 
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
index c7c8a87..a2dc43e 100644
--- a/src/objects-visiting.cc
+++ b/src/objects-visiting.cc
@@ -133,6 +133,7 @@
 
     case JS_OBJECT_TYPE:
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_MODULE_TYPE:
     case JS_VALUE_TYPE:
     case JS_DATE_TYPE:
     case JS_ARRAY_TYPE:
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 26e79ae..b476dfe 100644
--- a/src/objects-visiting.h
+++ b/src/objects-visiting.h
@@ -289,6 +289,23 @@
   }
 
  private:
+  static inline int VisitJSFunction(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    VisitPointers(heap,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
+
+    // Don't visit code entry. We are using this visitor only during scavenges.
+
+    VisitPointers(
+        heap,
+        HeapObject::RawField(object,
+                             JSFunction::kCodeEntryOffset + kPointerSize),
+        HeapObject::RawField(object,
+                             JSFunction::kNonWeakFieldsEndOffset));
+    return JSFunction::kSize;
+  }
+
   static inline int VisitByteArray(Map* map, HeapObject* object) {
     return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
   }
diff --git a/src/objects.cc b/src/objects.cc
index 961876b..ef9d498 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1338,6 +1338,7 @@
       break;
     case JS_OBJECT_TYPE:
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_MODULE_TYPE:
     case JS_VALUE_TYPE:
     case JS_DATE_TYPE:
     case JS_ARRAY_TYPE:
@@ -2321,7 +2322,7 @@
   }
   // If the transition already exists, return its descriptor.
   if (index != DescriptorArray::kNotFound) {
-    PropertyDetails details(descriptors->GetDetails(index));
+    PropertyDetails details = descriptors->GetDetails(index);
     if (details.type() == ELEMENTS_TRANSITION) {
       return descriptors->GetValue(index);
     } else {
@@ -3025,7 +3026,6 @@
     String* name,
     Object* value,
     PropertyAttributes attributes) {
-
   // Make sure that the top context does not change when doing callbacks or
   // interceptor calls.
   AssertNoContextChange ncc;
@@ -3094,7 +3094,6 @@
       return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
     case HANDLER:
       UNREACHABLE();
-      return value;
   }
   UNREACHABLE();  // keep the compiler happy
   return value;
@@ -3345,7 +3344,7 @@
 
   DescriptorArray* descs = map_of_this->instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
-    PropertyDetails details(descs->GetDetails(i));
+    PropertyDetails details = descs->GetDetails(i);
     switch (details.type()) {
       case CONSTANT_FUNCTION: {
         PropertyDetails d =
@@ -4207,7 +4206,7 @@
   int result = 0;
   DescriptorArray* descs = instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
-    PropertyDetails details(descs->GetDetails(i));
+    PropertyDetails details = descs->GetDetails(i);
     if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
       result++;
     }
@@ -5688,7 +5687,7 @@
                                        int src_index,
                                        const WhitenessWitness& witness) {
   Object* value = src->GetValue(src_index);
-  PropertyDetails details(src->GetDetails(src_index));
+  PropertyDetails details = src->GetDetails(src_index);
   if (details.type() == CALLBACKS && value->IsAccessorPair()) {
     MaybeObject* maybe_copy =
         AccessorPair::cast(value)->CopyWithoutTransitions();
@@ -5731,7 +5730,7 @@
   if (replacing) {
     // We are replacing an existing descriptor.  We keep the enumeration
     // index of a visible property.
-    PropertyType t = PropertyDetails(GetDetails(index)).type();
+    PropertyType t = GetDetails(index).type();
     if (t == CONSTANT_FUNCTION ||
         t == FIELD ||
         t == CALLBACKS ||
@@ -5758,8 +5757,7 @@
   int enumeration_index = NextEnumerationIndex();
   if (!descriptor->ContainsTransition()) {
     if (keep_enumeration_index) {
-      descriptor->SetEnumerationIndex(
-          PropertyDetails(GetDetails(index)).index());
+      descriptor->SetEnumerationIndex(GetDetails(index).index());
     } else {
       descriptor->SetEnumerationIndex(enumeration_index);
       ++enumeration_index;
@@ -5903,10 +5901,10 @@
     ASSERT(hash == mid_hash);
     // There might be more, so we find the first one and
     // check them all to see if we have a match.
-    if (name == mid_name  && !is_null_descriptor(mid)) return mid;
+    if (name == mid_name  && !IsNullDescriptor(mid)) return mid;
     while ((mid > low) && (GetKey(mid - 1)->Hash() == hash)) mid--;
     for (; (mid <= high) && (GetKey(mid)->Hash() == hash); mid++) {
-      if (GetKey(mid)->Equals(name) && !is_null_descriptor(mid)) return mid;
+      if (GetKey(mid)->Equals(name) && !IsNullDescriptor(mid)) return mid;
     }
     break;
   }
@@ -5920,7 +5918,7 @@
     String* entry = GetKey(number);
     if ((entry->Hash() == hash) &&
         name->Equals(entry) &&
-        !is_null_descriptor(number)) {
+        !IsNullDescriptor(number)) {
       return number;
     }
   }
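
The lookups touched above (now using IsNullDescriptor) search a descriptor array sorted by string hash, where distinct names may share a hash: once the hash is located, every entry with that hash is compared by actual name, and null descriptors are skipped. A minimal sketch of the same pattern over a std::vector (illustrative only; Entry and Search are invented, and std::lower_bound already lands on the first entry with the matching hash, so the explicit backing-up step of the original is not needed):

#include <algorithm>
#include <cstdint>
#include <string>
#include <vector>

struct Entry {
  std::uint32_t hash;
  std::string name;
  bool is_null_descriptor;  // Placeholder for DescriptorArray's null entries.
};

// Returns the index of 'name', or -1 if absent.  'entries' is sorted by hash.
int Search(const std::vector<Entry>& entries,
           const std::string& name, std::uint32_t hash) {
  auto it = std::lower_bound(
      entries.begin(), entries.end(), hash,
      [](const Entry& e, std::uint32_t h) { return e.hash < h; });
  // Scan all entries sharing this hash; several names can collide.
  for (; it != entries.end() && it->hash == hash; ++it) {
    if (it->name == name && !it->is_null_descriptor) {
      return static_cast<int>(it - entries.begin());
    }
  }
  return -1;
}
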
diff --git a/src/objects.h b/src/objects.h
index b728d0c..d682cde 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -59,6 +59,7 @@
 //           - JSWeakMap
 //           - JSRegExp
 //           - JSFunction
+//           - JSModule
 //           - GlobalObject
 //             - JSGlobalObject
 //             - JSBuiltinsObject
@@ -306,6 +307,7 @@
   V(JS_DATE_TYPE)                                                              \
   V(JS_OBJECT_TYPE)                                                            \
   V(JS_CONTEXT_EXTENSION_OBJECT_TYPE)                                          \
+  V(JS_MODULE_TYPE)                                                            \
   V(JS_GLOBAL_OBJECT_TYPE)                                                     \
   V(JS_BUILTINS_OBJECT_TYPE)                                                   \
   V(JS_GLOBAL_PROXY_TYPE)                                                      \
@@ -626,6 +628,7 @@
   JS_DATE_TYPE,
   JS_OBJECT_TYPE,
   JS_CONTEXT_EXTENSION_OBJECT_TYPE,
+  JS_MODULE_TYPE,
   JS_GLOBAL_OBJECT_TYPE,
   JS_BUILTINS_OBJECT_TYPE,
   JS_GLOBAL_PROXY_TYPE,
@@ -803,6 +806,7 @@
   V(JSReceiver)                                \
   V(JSObject)                                  \
   V(JSContextExtensionObject)                  \
+  V(JSModule)                                  \
   V(Map)                                       \
   V(DescriptorArray)                           \
   V(DeoptimizationInputData)                   \
@@ -812,6 +816,7 @@
   V(FixedDoubleArray)                          \
   V(Context)                                   \
   V(GlobalContext)                             \
+  V(ModuleContext)                             \
   V(ScopeInfo)                                 \
   V(JSFunction)                                \
   V(Code)                                      \
@@ -2468,7 +2473,7 @@
   // Accessors for fetching instance descriptor at descriptor number.
   inline String* GetKey(int descriptor_number);
   inline Object* GetValue(int descriptor_number);
-  inline Smi* GetDetails(int descriptor_number);
+  inline PropertyDetails GetDetails(int descriptor_number);
   inline PropertyType GetType(int descriptor_number);
   inline int GetFieldIndex(int descriptor_number);
   inline JSFunction* GetConstantFunction(int descriptor_number);
@@ -2477,7 +2482,6 @@
   inline bool IsProperty(int descriptor_number);
   inline bool IsTransitionOnly(int descriptor_number);
   inline bool IsNullDescriptor(int descriptor_number);
-  inline bool IsDontEnum(int descriptor_number);
 
   class WhitenessWitness {
    public:
@@ -2631,10 +2635,6 @@
     return descriptor_number << 1;
   }
 
-  bool is_null_descriptor(int descriptor_number) {
-    return PropertyDetails(GetDetails(descriptor_number)).type() ==
-        NULL_DESCRIPTOR;
-  }
   // Swap operation on FixedArray without using write barriers.
   static inline void NoIncrementalWriteBarrierSwap(
       FixedArray* array, int first, int second);
@@ -5691,6 +5691,35 @@
 };
 
 
+// Representation for module instance objects.
+class JSModule: public JSObject {
+ public:
+  // [context]: the context holding the module's locals, or undefined if none.
+  DECL_ACCESSORS(context, Object)
+
+  // Casting.
+  static inline JSModule* cast(Object* obj);
+
+  // Dispatched behavior.
+#ifdef OBJECT_PRINT
+  inline void JSModulePrint() {
+    JSModulePrint(stdout);
+  }
+  void JSModulePrint(FILE* out);
+#endif
+#ifdef DEBUG
+  void JSModuleVerify();
+#endif
+
+  // Layout description.
+  static const int kContextOffset = JSObject::kHeaderSize;
+  static const int kSize = kContextOffset + kPointerSize;
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(JSModule);
+};
+
+
 // JSFunction describes JavaScript functions.
 class JSFunction: public JSObject {
  public:
diff --git a/src/parser.cc b/src/parser.cc
index da68041..8620519 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -1333,11 +1333,19 @@
 
   Expect(Token::RBRACE, CHECK_OK);
   scope->set_end_position(scanner().location().end_pos);
-  body->set_block_scope(scope);
+  body->set_scope(scope);
 
-  scope->interface()->Freeze(ok);
+  // Instance objects have to be created ahead of time (before code generation
+  // linking them) because of potentially cyclic references between them.
+  // We create them here, to avoid another pass over the AST.
+  Interface* interface = scope->interface();
+  interface->MakeModule(ok);
   ASSERT(ok);
-  return factory()->NewModuleLiteral(body, scope->interface());
+  interface->MakeSingleton(Isolate::Current()->factory()->NewJSModule(), ok);
+  ASSERT(ok);
+  interface->Freeze(ok);
+  ASSERT(ok);
+  return factory()->NewModuleLiteral(body, interface);
 }
 
 
@@ -1403,7 +1411,14 @@
 #ifdef DEBUG
   if (FLAG_print_interface_details) PrintF("# Url ");
 #endif
-  return factory()->NewModuleUrl(symbol);
+
+  Module* result = factory()->NewModuleUrl(symbol);
+  Interface* interface = result->interface();
+  interface->MakeSingleton(Isolate::Current()->factory()->NewJSModule(), ok);
+  ASSERT(ok);
+  interface->Freeze(ok);
+  ASSERT(ok);
+  return result;
 }
 
 
@@ -2015,7 +2030,7 @@
   Expect(Token::RBRACE, CHECK_OK);
   block_scope->set_end_position(scanner().location().end_pos);
   block_scope = block_scope->FinalizeBlockScope();
-  body->set_block_scope(block_scope);
+  body->set_scope(block_scope);
   return body;
 }
 
@@ -2254,7 +2269,7 @@
     // Global variable declarations must be compiled in a specific
     // way. When the script containing the global variable declaration
     // is entered, the global variable must be declared, so that if it
-    // doesn't exist (not even in a prototype of the global object) it
+    // doesn't exist (on the global object itself, see ES5 errata) it
     // gets created with an initial undefined value. This is handled
     // by the declarations part of the function representing the
     // top-level global code; see Runtime::DeclareGlobalVariable. If
@@ -2917,7 +2932,7 @@
         top_scope_ = saved_scope;
         for_scope->set_end_position(scanner().location().end_pos);
         for_scope = for_scope->FinalizeBlockScope();
-        body_block->set_block_scope(for_scope);
+        body_block->set_scope(for_scope);
         // Parsed for-in loop w/ let declaration.
         return loop;
 
@@ -2997,7 +3012,7 @@
     Block* result = factory()->NewBlock(NULL, 2, false);
     result->AddStatement(init);
     result->AddStatement(loop);
-    result->set_block_scope(for_scope);
+    result->set_scope(for_scope);
     if (loop) loop->Initialize(NULL, cond, next, body);
     return result;
   } else {
@@ -4460,15 +4475,15 @@
     Variable* fvar = NULL;
     Token::Value fvar_init_op = Token::INIT_CONST;
     if (type == FunctionLiteral::NAMED_EXPRESSION) {
-      VariableMode fvar_mode;
-      if (is_extended_mode()) {
-        fvar_mode = CONST_HARMONY;
-        fvar_init_op = Token::INIT_CONST_HARMONY;
-      } else {
-        fvar_mode = CONST;
-      }
-      fvar =
-          top_scope_->DeclareFunctionVar(function_name, fvar_mode, factory());
+      if (is_extended_mode()) fvar_init_op = Token::INIT_CONST_HARMONY;
+      VariableMode fvar_mode = is_extended_mode() ? CONST_HARMONY : CONST;
+      fvar = new(zone()) Variable(top_scope_,
+         function_name, fvar_mode, true /* is valid LHS */,
+         Variable::NORMAL, kCreatedInitialized);
+      VariableProxy* proxy = factory()->NewVariableProxy(fvar);
+      VariableDeclaration* fvar_declaration =
+          factory()->NewVariableDeclaration(proxy, fvar_mode, top_scope_);
+      top_scope_->DeclareFunctionVar(fvar_declaration);
     }
 
     // Determine whether the function will be lazily compiled.
diff --git a/src/platform-cygwin.cc b/src/platform-cygwin.cc
index 8b1e381..089ea38 100644
--- a/src/platform-cygwin.cc
+++ b/src/platform-cygwin.cc
@@ -620,11 +620,8 @@
       : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void AddActiveSampler(Sampler* sampler) {
     ScopedLock lock(mutex_);
@@ -749,6 +746,12 @@
 }
 
 
+void OS::TearDown() {
+  SamplerThread::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform-freebsd.cc b/src/platform-freebsd.cc
index 6b1c987..4a6845e 100644
--- a/src/platform-freebsd.cc
+++ b/src/platform-freebsd.cc
@@ -716,11 +716,8 @@
       : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void AddActiveSampler(Sampler* sampler) {
     ScopedLock lock(mutex_);
@@ -864,6 +861,12 @@
 }
 
 
+void OS::TearDown() {
+  SignalSender::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index 9bea32d..5c05a3a 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -964,6 +964,46 @@
   __sigset_t uc_sigmask;
 } ucontext_t;
 
+#elif !defined(__GLIBC__) && defined(__i386__)
+// x86 version for Android.
+struct _libc_fpreg {
+  uint16_t significand[4];
+  uint16_t exponent;
+};
+
+struct _libc_fpstate {
+  uint64_t cw;
+  uint64_t sw;
+  uint64_t tag;
+  uint64_t ipoff;
+  uint64_t cssel;
+  uint64_t dataoff;
+  uint64_t datasel;
+  struct _libc_fpreg _st[8];
+  uint64_t status;
+};
+
+typedef struct _libc_fpstate *fpregset_t;
+
+typedef struct mcontext {
+  int32_t gregs[19];
+  fpregset_t fpregs;
+  int64_t oldmask;
+  int64_t cr2;
+} mcontext_t;
+
+typedef uint64_t __sigset_t;
+
+typedef struct ucontext {
+  uint64_t uc_flags;
+  struct ucontext *uc_link;
+  stack_t uc_stack;
+  mcontext_t uc_mcontext;
+  __sigset_t uc_sigmask;
+  struct _libc_fpstate __fpregs_mem;
+} ucontext_t;
+
+enum { REG_EBP = 6, REG_ESP = 7, REG_EIP = 14 };
 #endif
 
 
@@ -1055,11 +1095,8 @@
         vm_tgid_(getpid()),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void InstallSignalHandler() {
     struct sigaction sa;
@@ -1238,6 +1275,12 @@
 }
 
 
+void OS::TearDown() {
+  SignalSender::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform-macos.cc b/src/platform-macos.cc
index afcd80a..a937ed3 100644
--- a/src/platform-macos.cc
+++ b/src/platform-macos.cc
@@ -743,11 +743,8 @@
       : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void AddActiveSampler(Sampler* sampler) {
     ScopedLock lock(mutex_);
@@ -881,6 +878,12 @@
 }
 
 
+void OS::TearDown() {
+  SamplerThread::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform-nullos.cc b/src/platform-nullos.cc
index 42799db..679ef8e 100644
--- a/src/platform-nullos.cc
+++ b/src/platform-nullos.cc
@@ -91,6 +91,11 @@
 }
 
 
+void OS::TearDown() {
+  UNIMPLEMENTED();
+}
+
+
 // Returns the accumulated user time for thread.
 int OS::GetUserTime(uint32_t* secs,  uint32_t* usecs) {
   UNIMPLEMENTED();
diff --git a/src/platform-openbsd.cc b/src/platform-openbsd.cc
index 2b2d530..ba33a84 100644
--- a/src/platform-openbsd.cc
+++ b/src/platform-openbsd.cc
@@ -793,11 +793,8 @@
         vm_tgid_(getpid()),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void InstallSignalHandler() {
     struct sigaction sa;
@@ -948,6 +945,12 @@
 }
 
 
+void OS::TearDown() {
+  SignalSender::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform-solaris.cc b/src/platform-solaris.cc
index be8bbfc..0106e57 100644
--- a/src/platform-solaris.cc
+++ b/src/platform-solaris.cc
@@ -712,11 +712,8 @@
       : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void InstallSignalHandler() {
     struct sigaction sa;
@@ -870,6 +867,12 @@
 }
 
 
+void OS::TearDown() {
+  SignalSender::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform-win32.cc b/src/platform-win32.cc
index e36fc87..5a603d6 100644
--- a/src/platform-win32.cc
+++ b/src/platform-win32.cc
@@ -1949,11 +1949,8 @@
       : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
         interval_(interval) {}
 
-  static void SetUp() {
-    if (!mutex_) {
-      mutex_ = OS::CreateMutex();
-    }
-  }
+  static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+  static void TearDown() { delete mutex_; }
 
   static void AddActiveSampler(Sampler* sampler) {
     ScopedLock lock(mutex_);
@@ -2078,6 +2075,12 @@
 }
 
 
+void OS::TearDown() {
+  SamplerThread::TearDown();
+  delete limit_mutex;
+}
+
+
 Sampler::Sampler(Isolate* isolate, int interval)
     : isolate_(isolate),
       interval_(interval),
diff --git a/src/platform.h b/src/platform.h
index 3b2aa3c..168791a 100644
--- a/src/platform.h
+++ b/src/platform.h
@@ -123,6 +123,9 @@
   // called after CPU initialization.
   static void PostSetUp();
 
+  // Clean up platform-OS-related things. Called once at VM shutdown.
+  static void TearDown();
+
   // Returns the accumulated user time for thread. This routine
   // can be used for profiling. The implementation should
   // strive for high-precision timer resolution, preferable
diff --git a/src/profile-generator-inl.h b/src/profile-generator-inl.h
index 65369be..284e2df 100644
--- a/src/profile-generator-inl.h
+++ b/src/profile-generator-inl.h
@@ -95,6 +95,12 @@
 }
 
 
+HeapEntry* HeapGraphEdge::from() const {
+  return const_cast<HeapEntry*>(
+      reinterpret_cast<const HeapEntry*>(this - child_index_) - 1);
+}
+
+
 SnapshotObjectId HeapObjectsMap::GetNthGcSubrootId(int delta) {
   return kGcRootsFirstSubrootId + delta * kObjectIdStep;
 }
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index ee14fa9..5a5531a 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -957,11 +957,6 @@
 }
 
 
-HeapEntry* HeapGraphEdge::From() {
-  return reinterpret_cast<HeapEntry*>(this - child_index_) - 1;
-}
-
-
 void HeapEntry::Init(HeapSnapshot* snapshot,
                      Type type,
                      const char* name,
@@ -972,6 +967,7 @@
   snapshot_ = snapshot;
   type_ = type;
   painted_ = false;
+  reachable_from_window_ = false;
   name_ = name;
   self_size_ = self_size;
   retained_size_ = 0;
@@ -1134,6 +1130,7 @@
       gc_roots_entry_(NULL),
       natives_root_entry_(NULL),
       raw_entries_(NULL),
+      number_of_edges_(0),
       max_snapshot_js_object_id_(0) {
   STATIC_CHECK(
       sizeof(HeapGraphEdge) ==
@@ -1167,6 +1164,7 @@
                                    int children_count,
                                    int retainers_count) {
   ASSERT(raw_entries_ == NULL);
+  number_of_edges_ = children_count;
   raw_entries_size_ =
       HeapEntry::EntriesSize(entries_count, children_count, retainers_count);
   raw_entries_ = NewArray<char>(raw_entries_size_);
@@ -1312,10 +1310,8 @@
     VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
 
 HeapObjectsMap::HeapObjectsMap()
-    : initial_fill_mode_(true),
-      next_id_(kFirstAvailableObjectId),
-      entries_map_(AddressesMatch),
-      entries_(new List<EntryInfo>()) {
+    : next_id_(kFirstAvailableObjectId),
+      entries_map_(AddressesMatch) {
   // This dummy element solves a problem with entries_map_.
   // When we do lookup in HashMap we see no difference between two cases:
   // it has an entry with NULL as the value or it has created
@@ -1323,37 +1319,15 @@
   // With such dummy element we have a guaranty that all entries_map_ entries
   // will have the value field grater than 0.
   // This fact is using in MoveObject method.
-  entries_->Add(EntryInfo(0, NULL));
-}
-
-
-HeapObjectsMap::~HeapObjectsMap() {
-  delete entries_;
+  entries_.Add(EntryInfo(0, NULL, 0));
 }
 
 
 void HeapObjectsMap::SnapshotGenerationFinished() {
-  initial_fill_mode_ = false;
   RemoveDeadEntries();
 }
 
 
-SnapshotObjectId HeapObjectsMap::FindObject(Address addr) {
-  if (!initial_fill_mode_) {
-    SnapshotObjectId existing = FindEntry(addr);
-    if (existing != 0) return existing;
-  }
-  SnapshotObjectId id = next_id_;
-  next_id_ += kObjectIdStep;
-  AddEntry(addr, id);
-  // Here and in other places the length of entries_ list has to be
-  // the same or greater than the length of entries_map_. But entries_ list
-  // has a dummy element at index 0.
-  ASSERT(static_cast<uint32_t>(entries_->length()) > entries_map_.occupancy());
-  return id;
-}
-
-
 void HeapObjectsMap::MoveObject(Address from, Address to) {
   ASSERT(to != NULL);
   ASSERT(from != NULL);
@@ -1362,7 +1336,7 @@
   if (from_value == NULL) return;
   int from_entry_info_index =
       static_cast<int>(reinterpret_cast<intptr_t>(from_value));
-  entries_->at(from_entry_info_index).addr = to;
+  entries_.at(from_entry_info_index).addr = to;
   HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
   if (to_entry->value != NULL) {
     int to_entry_info_index =
@@ -1371,56 +1345,39 @@
     // value in addr field. It is bad because later at RemoveDeadEntries
     // one of these entries will be removed together with the corresponding
     // entries_map_ entry.
-    entries_->at(to_entry_info_index).addr = NULL;
+    entries_.at(to_entry_info_index).addr = NULL;
   }
   to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
 }
 
 
-void HeapObjectsMap::AddEntry(Address addr, SnapshotObjectId id) {
-  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
-  ASSERT(entry->value == NULL);
-  ASSERT(entries_->length() > 0 &&
-         entries_->at(0).id == 0 &&
-         entries_->at(0).addr == NULL);
-  ASSERT(entries_->at(entries_->length() - 1).id < id);
-  entry->value = reinterpret_cast<void*>(entries_->length());
-  entries_->Add(EntryInfo(id, addr));
-  ASSERT(static_cast<uint32_t>(entries_->length()) > entries_map_.occupancy());
-}
-
-
 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
   HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
-  if (entry != NULL) {
-    int entry_index =
-        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
-    EntryInfo& entry_info = entries_->at(entry_index);
-    entry_info.accessed = true;
-    ASSERT(static_cast<uint32_t>(entries_->length()) >
-           entries_map_.occupancy());
-    return entry_info.id;
-  } else {
-    return 0;
-  }
+  if (entry == NULL) return 0;
+  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
+  EntryInfo& entry_info = entries_.at(entry_index);
+  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
+  return entry_info.id;
 }
 
 
-SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr) {
-  ASSERT(static_cast<uint32_t>(entries_->length()) > entries_map_.occupancy());
+SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
+                                                unsigned int size) {
+  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
   HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
   if (entry->value != NULL) {
     int entry_index =
         static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
-    EntryInfo& entry_info = entries_->at(entry_index);
+    EntryInfo& entry_info = entries_.at(entry_index);
     entry_info.accessed = true;
+    entry_info.size = size;
     return entry_info.id;
   }
-  entry->value = reinterpret_cast<void*>(entries_->length());
+  entry->value = reinterpret_cast<void*>(entries_.length());
   SnapshotObjectId id = next_id_;
   next_id_ += kObjectIdStep;
-  entries_->Add(EntryInfo(id, addr));
-  ASSERT(static_cast<uint32_t>(entries_->length()) > entries_map_.occupancy());
+  entries_.Add(EntryInfo(id, addr, size));
+  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
   return id;
 }
 
@@ -1432,13 +1389,12 @@
 void HeapObjectsMap::UpdateHeapObjectsMap() {
   HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotsCollection::UpdateHeapObjectsMap");
-  HeapIterator iterator(HeapIterator::kFilterUnreachable);
+  HeapIterator iterator;
   for (HeapObject* obj = iterator.next();
        obj != NULL;
        obj = iterator.next()) {
-    FindOrAddEntry(obj->address());
+    FindOrAddEntry(obj->address(), obj->Size());
   }
-  initial_fill_mode_ = false;
   RemoveDeadEntries();
 }
 
@@ -1448,22 +1404,27 @@
   time_intervals_.Add(TimeInterval(next_id_));
   int prefered_chunk_size = stream->GetChunkSize();
   List<uint32_t> stats_buffer;
-  ASSERT(!entries_->is_empty());
-  EntryInfo* entry_info = &entries_->first();
-  EntryInfo* end_entry_info = &entries_->last() + 1;
+  ASSERT(!entries_.is_empty());
+  EntryInfo* entry_info = &entries_.first();
+  EntryInfo* end_entry_info = &entries_.last() + 1;
   for (int time_interval_index = 0;
        time_interval_index < time_intervals_.length();
        ++time_interval_index) {
     TimeInterval& time_interval = time_intervals_[time_interval_index];
     SnapshotObjectId time_interval_id = time_interval.id;
-    uint32_t entries_count = 0;
+    uint32_t entries_size = 0;
+    EntryInfo* start_entry_info = entry_info;
     while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
-      ++entries_count;
+      entries_size += entry_info->size;
       ++entry_info;
     }
-    if (time_interval.count != entries_count) {
+    uint32_t entries_count =
+        static_cast<uint32_t>(entry_info - start_entry_info);
+    if (time_interval.count != entries_count ||
+        time_interval.size != entries_size) {
       stats_buffer.Add(time_interval_index);
       stats_buffer.Add(time_interval.count = entries_count);
+      stats_buffer.Add(time_interval.size = entries_size);
       if (stats_buffer.length() >= prefered_chunk_size) {
         OutputStream::WriteResult result = stream->WriteUint32Chunk(
             &stats_buffer.first(), stats_buffer.length());
@@ -1483,17 +1444,17 @@
 
 
 void HeapObjectsMap::RemoveDeadEntries() {
-  ASSERT(entries_->length() > 0 &&
-         entries_->at(0).id == 0 &&
-         entries_->at(0).addr == NULL);
+  ASSERT(entries_.length() > 0 &&
+         entries_.at(0).id == 0 &&
+         entries_.at(0).addr == NULL);
   int first_free_entry = 1;
-  for (int i = 1; i < entries_->length(); ++i) {
-    EntryInfo& entry_info = entries_->at(i);
+  for (int i = 1; i < entries_.length(); ++i) {
+    EntryInfo& entry_info = entries_.at(i);
     if (entry_info.accessed) {
       if (first_free_entry != i) {
-        entries_->at(first_free_entry) = entry_info;
+        entries_.at(first_free_entry) = entry_info;
       }
-      entries_->at(first_free_entry).accessed = false;
+      entries_.at(first_free_entry).accessed = false;
       HashMap::Entry* entry = entries_map_.Lookup(
           entry_info.addr, AddressHash(entry_info.addr), false);
       ASSERT(entry);
@@ -1505,8 +1466,8 @@
       }
     }
   }
-  entries_->Rewind(first_free_entry);
-  ASSERT(static_cast<uint32_t>(entries_->length()) - 1 ==
+  entries_.Rewind(first_free_entry);
+  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
          entries_map_.occupancy());
 }
 
@@ -1594,7 +1555,7 @@
   for (HeapObject* obj = iterator.next();
        obj != NULL;
        obj = iterator.next()) {
-    if (ids_.FindObject(obj->address()) == id) {
+    if (ids_.FindEntry(obj->address()) == id) {
       ASSERT(object == NULL);
       object = obj;
       // Can't break -- kFilterUnreachable requires full heap traversal.
@@ -1895,10 +1856,13 @@
                                     const char* name,
                                     int children_count,
                                     int retainers_count) {
+  int object_size = object->Size();
+  SnapshotObjectId object_id =
+    collection_->GetObjectId(object->address(), object_size);
   return snapshot_->AddEntry(type,
                              name,
-                             collection_->GetObjectId(object->address()),
-                             object->Size(),
+                             object_id,
+                             object_size,
                              children_count,
                              retainers_count);
 }
@@ -2016,7 +1980,7 @@
     // We use JSGlobalProxy because this is what embedder (e.g. browser)
     // uses for the global object.
     JSGlobalProxy* proxy = JSGlobalProxy::cast(obj);
-    SetRootShortcutReference(proxy->map()->prototype());
+    SetWindowReference(proxy->map()->prototype());
   } else if (obj->IsJSObject()) {
     JSObject* js_obj = JSObject::cast(obj);
     ExtractClosureReferences(js_obj, entry);
@@ -2187,7 +2151,7 @@
   if (!js_obj->IsJSFunction()) return;
 
   JSFunction* func = JSFunction::cast(js_obj);
-  Context* context = func->context();
+  Context* context = func->context()->declaration_context();
   ScopeInfo* scope_info = context->closure()->shared()->scope_info();
 
   if (func->shared()->bound()) {
@@ -2284,15 +2248,15 @@
       Object* k = dictionary->KeyAt(i);
       if (dictionary->IsKey(k)) {
         Object* target = dictionary->ValueAt(i);
-        SetPropertyReference(
-            js_obj, entry, String::cast(k), target);
         // We assume that global objects can only have slow properties.
-        if (target->IsJSGlobalPropertyCell()) {
-          SetPropertyShortcutReference(js_obj,
-                                       entry,
-                                       String::cast(k),
-                                       JSGlobalPropertyCell::cast(
-                                           target)->value());
+        Object* value = target->IsJSGlobalPropertyCell()
+            ? JSGlobalPropertyCell::cast(target)->value()
+            : target;
+        if (String::cast(k)->length() > 0) {
+          SetPropertyReference(js_obj, entry, String::cast(k), value);
+        } else {
+          TagObject(value, "(hidden properties)");
+          SetInternalReference(js_obj, entry, "hidden_properties", value);
         }
       }
     }
@@ -2661,7 +2625,7 @@
 }
 
 
-void V8HeapExplorer::SetRootShortcutReference(Object* child_obj) {
+void V8HeapExplorer::SetWindowReference(Object* child_obj) {
   HeapEntry* child_entry = GetEntry(child_obj);
   ASSERT(child_entry != NULL);
   filler_->SetNamedAutoIndexReference(
@@ -2743,7 +2707,7 @@
     Handle<JSGlobalObject> global_obj = enumerator.at(i);
     Object* obj_document;
     if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
-       obj_document->IsJSObject()) {
+        obj_document->IsJSObject()) {
       JSObject* document = JSObject::cast(obj_document);
       Object* obj_url;
       if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
@@ -3297,19 +3261,61 @@
 }
 
 
-void HeapSnapshotGenerator::FillReversePostorderIndexes(
+bool HeapSnapshotGenerator::IsWindowReference(const HeapGraphEdge& edge) {
+  ASSERT(edge.from() == snapshot_->root());
+  return edge.type() == HeapGraphEdge::kShortcut;
+}
+
+
+void HeapSnapshotGenerator::MarkWindowReachableObjects() {
+  List<HeapEntry*> worklist;
+
+  Vector<HeapGraphEdge> children = snapshot_->root()->children();
+  for (int i = 0; i < children.length(); ++i) {
+    if (IsWindowReference(children[i])) {
+      worklist.Add(children[i].to());
+    }
+  }
+
+  while (!worklist.is_empty()) {
+    HeapEntry* entry = worklist.RemoveLast();
+    if (entry->reachable_from_window()) continue;
+    entry->set_reachable_from_window();
+    Vector<HeapGraphEdge> children = entry->children();
+    for (int i = 0; i < children.length(); ++i) {
+      HeapEntry* child = children[i].to();
+      if (!child->reachable_from_window()) {
+        worklist.Add(child);
+      }
+    }
+  }
+}
+
+
+static bool IsRetainingEdge(HeapGraphEdge* edge) {
+  if (edge->type() == HeapGraphEdge::kShortcut) return false;
+  // The edge is not retaining if it goes from system domain
+  // (i.e. an object not reachable from window) to the user domain
+  // (i.e. a reachable object).
+  return edge->from()->reachable_from_window()
+      || !edge->to()->reachable_from_window();
+}
+
+
+void HeapSnapshotGenerator::FillPostorderIndexes(
     Vector<HeapEntry*>* entries) {
   snapshot_->ClearPaint();
   int current_entry = 0;
   List<HeapEntry*> nodes_to_visit;
-  nodes_to_visit.Add(snapshot_->root());
+  HeapEntry* root = snapshot_->root();
+  nodes_to_visit.Add(root);
   snapshot_->root()->paint();
   while (!nodes_to_visit.is_empty()) {
     HeapEntry* entry = nodes_to_visit.last();
     Vector<HeapGraphEdge> children = entry->children();
     bool has_new_edges = false;
     for (int i = 0; i < children.length(); ++i) {
-      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
+      if (entry != root && !IsRetainingEdge(&children[i])) continue;
       HeapEntry* child = children[i].to();
       if (!child->painted()) {
         nodes_to_visit.Add(child);
@@ -3344,6 +3350,7 @@
     const Vector<HeapEntry*>& entries,
     Vector<int>* dominators) {
   if (entries.length() == 0) return true;
+  HeapEntry* root = snapshot_->root();
   const int entries_length = entries.length(), root_index = entries_length - 1;
   static const int kNoDominator = -1;
   for (int i = 0; i < root_index; ++i) (*dominators)[i] = kNoDominator;
@@ -3372,8 +3379,8 @@
       int new_idom_index = kNoDominator;
       Vector<HeapGraphEdge*> rets = entries[i]->retainers();
       for (int j = 0; j < rets.length(); ++j) {
-        if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
-        int ret_index = rets[j]->From()->ordered_index();
+        if (rets[j]->from() != root && !IsRetainingEdge(rets[j])) continue;
+        int ret_index = rets[j]->from()->ordered_index();
         if (dominators->at(ret_index) != kNoDominator) {
           new_idom_index = new_idom_index == kNoDominator
               ? ret_index
@@ -3399,9 +3406,10 @@
 
 
 bool HeapSnapshotGenerator::SetEntriesDominators() {
-  // This array is used for maintaining reverse postorder of nodes.
+  MarkWindowReachableObjects();
+  // This array is used for maintaining postorder of nodes.
   ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries()->length());
-  FillReversePostorderIndexes(&ordered_entries);
+  FillPostorderIndexes(&ordered_entries);
   ScopedVector<int> dominators(ordered_entries.length());
   if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
   for (int i = 0; i < ordered_entries.length(); ++i) {
@@ -3576,14 +3584,37 @@
 }
 
 
+void HeapSnapshotJSONSerializer::CalculateNodeIndexes(
+    const List<HeapEntry*>& nodes) {
+  // type,name,id,self_size,retained_size,dominator,children_index.
+  const int node_fields_count = 7;
+  // Root must be the first.
+  ASSERT(nodes.first() == snapshot_->root());
+  // Rewrite node indexes, so they refer to actual array positions. Do this
+  // only once.
+  if (nodes[0]->entry_index() == -1) {
+    int index = 0;
+    for (int i = 0; i < nodes.length(); ++i, index += node_fields_count) {
+      nodes[i]->set_entry_index(index);
+    }
+  }
+}
+
+
 void HeapSnapshotJSONSerializer::SerializeImpl() {
+  List<HeapEntry*>& nodes = *(snapshot_->entries());
+  CalculateNodeIndexes(nodes);
   writer_->AddCharacter('{');
   writer_->AddString("\"snapshot\":{");
   SerializeSnapshot();
   if (writer_->aborted()) return;
   writer_->AddString("},\n");
   writer_->AddString("\"nodes\":[");
-  SerializeNodes();
+  SerializeNodes(nodes);
+  if (writer_->aborted()) return;
+  writer_->AddString("],\n");
+  writer_->AddString("\"edges\":[");
+  SerializeEdges(nodes);
   if (writer_->aborted()) return;
   writer_->AddString("],\n");
   writer_->AddString("\"strings\":[");
@@ -3630,7 +3661,8 @@
 }
 
 
-void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
+void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
+                                               bool first_edge) {
   // The buffer needs space for 3 ints, 3 commas and \0
   static const int kBufferSize =
       MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1;  // NOLINT
@@ -3640,7 +3672,9 @@
       || edge->type() == HeapGraphEdge::kWeak
       ? edge->index() : GetStringId(edge->name());
   int buffer_pos = 0;
-  buffer[buffer_pos++] = ',';
+  if (!first_edge) {
+    buffer[buffer_pos++] = ',';
+  }
   buffer_pos = itoa(edge->type(), buffer, buffer_pos);
   buffer[buffer_pos++] = ',';
   buffer_pos = itoa(edge_name_or_index, buffer, buffer_pos);
@@ -3651,17 +3685,33 @@
 }
 
 
-void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
+void HeapSnapshotJSONSerializer::SerializeEdges(const List<HeapEntry*>& nodes) {
+  bool first_edge = true;
+  for (int i = 0; i < nodes.length(); ++i) {
+    HeapEntry* entry = nodes[i];
+    Vector<HeapGraphEdge> children = entry->children();
+    for (int j = 0; j < children.length(); ++j) {
+      SerializeEdge(&children[j], first_edge);
+      first_edge = false;
+      if (writer_->aborted()) return;
+    }
+  }
+}
+
+
+void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry,
+                                               int edges_index) {
   // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
   static const int kBufferSize =
       6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned  // NOLINT
       + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned  // NOLINT
       + 7 + 1 + 1;
   EmbeddedVector<char, kBufferSize> buffer;
-  Vector<HeapGraphEdge> children = entry->children();
   int buffer_pos = 0;
   buffer[buffer_pos++] = '\n';
-  buffer[buffer_pos++] = ',';
+  if (entry->entry_index() != 0) {
+    buffer[buffer_pos++] = ',';
+  }
   buffer_pos = itoa(entry->type(), buffer, buffer_pos);
   buffer[buffer_pos++] = ',';
   buffer_pos = itoa(GetStringId(entry->name()), buffer, buffer_pos);
@@ -3674,93 +3724,19 @@
   buffer[buffer_pos++] = ',';
   buffer_pos = itoa(entry->dominator()->entry_index(), buffer, buffer_pos);
   buffer[buffer_pos++] = ',';
-  buffer_pos = itoa(children.length(), buffer, buffer_pos);
+  buffer_pos = itoa(edges_index, buffer, buffer_pos);
   buffer[buffer_pos++] = '\0';
   writer_->AddString(buffer.start());
-  for (int i = 0; i < children.length(); ++i) {
-    SerializeEdge(&children[i]);
-    if (writer_->aborted()) return;
-  }
 }
 
 
-void HeapSnapshotJSONSerializer::SerializeNodes() {
-  // The first (zero) item of nodes array is an object describing node
-  // serialization layout.  We use a set of macros to improve
-  // readability.
-#define JSON_A(s) "["s"]"
-#define JSON_O(s) "{"s"}"
-#define JSON_S(s) "\""s"\""
-  writer_->AddString(JSON_O(
-    JSON_S("fields") ":" JSON_A(
-        JSON_S("type")
-        "," JSON_S("name")
-        "," JSON_S("id")
-        "," JSON_S("self_size")
-        "," JSON_S("retained_size")
-        "," JSON_S("dominator")
-        "," JSON_S("children_count")
-        "," JSON_S("children"))
-    "," JSON_S("types") ":" JSON_A(
-        JSON_A(
-            JSON_S("hidden")
-            "," JSON_S("array")
-            "," JSON_S("string")
-            "," JSON_S("object")
-            "," JSON_S("code")
-            "," JSON_S("closure")
-            "," JSON_S("regexp")
-            "," JSON_S("number")
-            "," JSON_S("native")
-            "," JSON_S("synthetic"))
-        "," JSON_S("string")
-        "," JSON_S("number")
-        "," JSON_S("number")
-        "," JSON_S("number")
-        "," JSON_S("number")
-        "," JSON_S("number")
-        "," JSON_O(
-            JSON_S("fields") ":" JSON_A(
-                JSON_S("type")
-                "," JSON_S("name_or_index")
-                "," JSON_S("to_node"))
-            "," JSON_S("types") ":" JSON_A(
-                JSON_A(
-                    JSON_S("context")
-                    "," JSON_S("element")
-                    "," JSON_S("property")
-                    "," JSON_S("internal")
-                    "," JSON_S("hidden")
-                    "," JSON_S("shortcut")
-                    "," JSON_S("weak"))
-                "," JSON_S("string_or_number")
-                "," JSON_S("node"))))));
-#undef JSON_S
-#undef JSON_O
-#undef JSON_A
-
-  const int node_fields_count = 7;
-  // type,name,id,self_size,retained_size,dominator,children_count.
+void HeapSnapshotJSONSerializer::SerializeNodes(const List<HeapEntry*>& nodes) {
   const int edge_fields_count = 3;  // type,name|index,to_node.
-
-  List<HeapEntry*>& nodes = *(snapshot_->entries());
-  // Root must be the first.
-  ASSERT(nodes.first() == snapshot_->root());
-  // Rewrite node indexes, so they refer to actual array positions. Do this
-  // only once.
-  if (nodes[0]->entry_index() == -1) {
-    // Nodes start from array index 1.
-    int index = 1;
-    for (int i = 0; i < nodes.length(); ++i) {
-      HeapEntry* node = nodes[i];
-      node->set_entry_index(index);
-      index += node_fields_count +
-          node->children().length() * edge_fields_count;
-    }
-  }
-
+  int edges_index = 0;
   for (int i = 0; i < nodes.length(); ++i) {
-    SerializeNode(nodes[i]);
+    HeapEntry* entry = nodes[i];
+    SerializeNode(entry, edges_index);
+    edges_index += entry->children().length() * edge_fields_count;
     if (writer_->aborted()) return;
   }
 }
@@ -3772,6 +3748,61 @@
   writer_->AddString("\"");
   writer_->AddString(",\"uid\":");
   writer_->AddNumber(snapshot_->uid());
+  writer_->AddString(",\"meta\":");
+  // The object describing node serialization layout.
+  // We use a set of macros to improve readability.
+#define JSON_A(s) "["s"]"
+#define JSON_O(s) "{"s"}"
+#define JSON_S(s) "\""s"\""
+  writer_->AddString(JSON_O(
+    JSON_S("node_fields") ":" JSON_A(
+        JSON_S("type") ","
+        JSON_S("name") ","
+        JSON_S("id") ","
+        JSON_S("self_size") ","
+        JSON_S("retained_size") ","
+        JSON_S("dominator") ","
+        JSON_S("edges_index")) ","
+    JSON_S("node_types") ":" JSON_A(
+        JSON_A(
+            JSON_S("hidden") ","
+            JSON_S("array") ","
+            JSON_S("string") ","
+            JSON_S("object") ","
+            JSON_S("code") ","
+            JSON_S("closure") ","
+            JSON_S("regexp") ","
+            JSON_S("number") ","
+            JSON_S("native") ","
+            JSON_S("synthetic")) ","
+        JSON_S("string") ","
+        JSON_S("number") ","
+        JSON_S("number") ","
+        JSON_S("number") ","
+        JSON_S("number") ","
+        JSON_S("number")) ","
+    JSON_S("edge_fields") ":" JSON_A(
+        JSON_S("type") ","
+        JSON_S("name_or_index") ","
+        JSON_S("to_node")) ","
+    JSON_S("edge_types") ":" JSON_A(
+        JSON_A(
+            JSON_S("context") ","
+            JSON_S("element") ","
+            JSON_S("property") ","
+            JSON_S("internal") ","
+            JSON_S("hidden") ","
+            JSON_S("shortcut") ","
+            JSON_S("weak")) ","
+        JSON_S("string_or_number") ","
+        JSON_S("node"))));
+#undef JSON_S
+#undef JSON_O
+#undef JSON_A
+  writer_->AddString(",\"node_count\":");
+  writer_->AddNumber(snapshot_->entries()->length());
+  writer_->AddString(",\"edge_count\":");
+  writer_->AddNumber(snapshot_->number_of_edges());
 }
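
For orientation, a hand-written sketch of the snapshot header that SerializeSnapshot and SerializeImpl now emit; the title, uid and counts are made up and the nodes/edges/strings arrays are elided:

  {"snapshot":
    {"title":"snap","uid":1,
     "meta":
       {"node_fields":["type","name","id","self_size","retained_size",
                       "dominator","edges_index"],
        "node_types":[["hidden","array","string","object","code","closure",
                       "regexp","number","native","synthetic"],
                      "string","number","number","number","number","number"],
        "edge_fields":["type","name_or_index","to_node"],
        "edge_types":[["context","element","property","internal","hidden",
                       "shortcut","weak"],"string_or_number","node"]},
     "node_count":2,
     "edge_count":1},
   "nodes":[...],
   "edges":[...],
   "strings":[...]}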
 
 
diff --git a/src/profile-generator.h b/src/profile-generator.h
index 96640fd..897962e 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -464,21 +464,20 @@
   void Init(int child_index, Type type, int index, HeapEntry* to);
   void Init(int child_index, int index, HeapEntry* to);
 
-  Type type() { return static_cast<Type>(type_); }
-  int index() {
+  Type type() const { return static_cast<Type>(type_); }
+  int index() const {
     ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak);
     return index_;
   }
-  const char* name() {
+  const char* name() const {
     ASSERT(type_ == kContextVariable
            || type_ == kProperty
            || type_ == kInternal
            || type_ == kShortcut);
     return name_;
   }
-  HeapEntry* to() { return to_; }
-
-  HeapEntry* From();
+  HeapEntry* to() const { return to_; }
+  INLINE(HeapEntry* from() const);
 
  private:
   int child_index_ : 29;
@@ -564,6 +563,8 @@
   void clear_paint() { painted_ = false; }
   bool painted() { return painted_; }
   void paint() { painted_ = true; }
+  bool reachable_from_window() { return reachable_from_window_; }
+  void set_reachable_from_window() { reachable_from_window_ = true; }
 
   void SetIndexedReference(HeapGraphEdge::Type type,
                            int child_index,
@@ -600,8 +601,9 @@
   const char* TypeAsString();
 
   unsigned painted_: 1;
+  unsigned reachable_from_window_: 1;
   unsigned type_: 4;
-  int children_count_: 27;
+  int children_count_: 26;
   int retainers_count_;
   int self_size_;
   union {
@@ -648,6 +650,7 @@
   HeapEntry* gc_subroot(int index) { return gc_subroot_entries_[index]; }
   List<HeapEntry*>* entries() { return &entries_; }
   size_t raw_entries_size() { return raw_entries_size_; }
+  int number_of_edges() { return number_of_edges_; }
   void RememberLastJSObjectId();
   SnapshotObjectId max_snapshot_js_object_id() const {
     return max_snapshot_js_object_id_;
@@ -690,6 +693,7 @@
   List<HeapEntry*> entries_;
   List<HeapEntry*> sorted_entries_;
   size_t raw_entries_size_;
+  int number_of_edges_;
   SnapshotObjectId max_snapshot_js_object_id_;
 
   friend class HeapSnapshotTester;
@@ -701,10 +705,10 @@
 class HeapObjectsMap {
  public:
   HeapObjectsMap();
-  ~HeapObjectsMap();
 
   void SnapshotGenerationFinished();
-  SnapshotObjectId FindObject(Address addr);
+  SnapshotObjectId FindEntry(Address addr);
+  SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size);
   void MoveObject(Address from, Address to);
   SnapshotObjectId last_assigned_id() const {
     return next_id_ - kObjectIdStep;
@@ -725,23 +729,22 @@
 
  private:
   struct EntryInfo {
-    EntryInfo(SnapshotObjectId id, Address addr)
-      : id(id), addr(addr), accessed(true) { }
-    EntryInfo(SnapshotObjectId id, Address addr, bool accessed)
-      : id(id), addr(addr), accessed(accessed) { }
+    EntryInfo(SnapshotObjectId id, Address addr, unsigned int size)
+        : id(id), addr(addr), size(size), accessed(true) { }
+    EntryInfo(SnapshotObjectId id, Address addr, unsigned int size,
+              bool accessed)
+        : id(id), addr(addr), size(size), accessed(accessed) { }
     SnapshotObjectId id;
     Address addr;
+    unsigned int size;
     bool accessed;
   };
   struct TimeInterval {
-    explicit TimeInterval(SnapshotObjectId id) : id(id), count(0) { }
+    explicit TimeInterval(SnapshotObjectId id) : id(id), size(0), count(0) { }
     SnapshotObjectId id;
+    unsigned int size;
     uint32_t count;
   };
 
-  void AddEntry(Address addr, SnapshotObjectId id);
-  SnapshotObjectId FindEntry(Address addr);
-  SnapshotObjectId FindOrAddEntry(Address addr);
   void UpdateHeapObjectsMap();
   void RemoveDeadEntries();
 
@@ -755,10 +758,9 @@
         v8::internal::kZeroHashSeed);
   }
 
-  bool initial_fill_mode_;
   SnapshotObjectId next_id_;
   HashMap entries_map_;
-  List<EntryInfo>* entries_;
+  List<EntryInfo> entries_;
   List<TimeInterval> time_intervals_;
 
   DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
@@ -787,7 +789,12 @@
   StringsStorage* names() { return &names_; }
   TokenEnumerator* token_enumerator() { return token_enumerator_; }
 
-  SnapshotObjectId GetObjectId(Address addr) { return ids_.FindObject(addr); }
+  SnapshotObjectId FindObjectId(Address object_addr) {
+    return ids_.FindEntry(object_addr);
+  }
+  SnapshotObjectId GetObjectId(Address object_addr, int object_size) {
+    return ids_.FindOrAddEntry(object_addr, object_size);
+  }
   Handle<HeapObject> FindHeapObjectById(SnapshotObjectId id);
   void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }
   SnapshotObjectId last_assigned_id() const {
@@ -1013,7 +1020,7 @@
                                     HeapEntry* parent,
                                     String* reference_name,
                                     Object* child);
-  void SetRootShortcutReference(Object* child);
+  void SetWindowReference(Object* window);
   void SetRootGcRootsReference();
   void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
   void SetGcSubrootReference(
@@ -1117,7 +1124,9 @@
   bool CalculateRetainedSizes();
   bool CountEntriesAndReferences();
   bool FillReferences();
-  void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
+  void FillPostorderIndexes(Vector<HeapEntry*>* entries);
+  bool IsWindowReference(const HeapGraphEdge& edge);
+  void MarkWindowReachableObjects();
   void ProgressStep();
   bool ProgressReport(bool force = false);
   bool SetEntriesDominators();
@@ -1160,12 +1169,14 @@
         v8::internal::kZeroHashSeed);
   }
 
+  void CalculateNodeIndexes(const List<HeapEntry*>& nodes);
   HeapSnapshot* CreateFakeSnapshot();
   int GetStringId(const char* s);
-  void SerializeEdge(HeapGraphEdge* edge);
+  void SerializeEdge(HeapGraphEdge* edge, bool first_edge);
+  void SerializeEdges(const List<HeapEntry*>& nodes);
   void SerializeImpl();
-  void SerializeNode(HeapEntry* entry);
-  void SerializeNodes();
+  void SerializeNode(HeapEntry* entry, int edges_index);
+  void SerializeNodes(const List<HeapEntry*>& nodes);
   void SerializeSnapshot();
   void SerializeString(const unsigned char* s);
   void SerializeStrings();
diff --git a/src/property.h b/src/property.h
index 04f78b2..ba5e3c8 100644
--- a/src/property.h
+++ b/src/property.h
@@ -214,13 +214,6 @@
     number_ = number;
   }
 
-  void DescriptorResult(JSObject* holder, Smi* details, int number) {
-    lookup_type_ = DESCRIPTOR_TYPE;
-    holder_ = holder;
-    details_ = PropertyDetails(details);
-    number_ = number;
-  }
-
   void ConstantResult(JSObject* holder) {
     lookup_type_ = CONSTANT_TYPE;
     holder_ = holder;
diff --git a/src/regexp.js b/src/regexp.js
index eb617ea..7bcb612 100644
--- a/src/regexp.js
+++ b/src/regexp.js
@@ -296,7 +296,7 @@
 // of the last successful match.
 function RegExpGetLastMatch() {
   if (lastMatchInfoOverride !== null) {
-    return lastMatchInfoOverride[0];
+    return OVERRIDE_MATCH(lastMatchInfoOverride);
   }
   var regExpSubject = LAST_SUBJECT(lastMatchInfo);
   return SubString(regExpSubject,
@@ -334,8 +334,8 @@
     subject = LAST_SUBJECT(lastMatchInfo);
   } else {
     var override = lastMatchInfoOverride;
-    start_index = override[override.length - 2];
-    subject = override[override.length - 1];
+    start_index = OVERRIDE_POS(override);
+    subject = OVERRIDE_SUBJECT(override);
   }
   return SubString(subject, 0, start_index);
 }
@@ -349,9 +349,9 @@
     subject = LAST_SUBJECT(lastMatchInfo);
   } else {
     var override = lastMatchInfoOverride;
-    subject = override[override.length - 1];
-    var pattern = override[override.length - 3];
-    start_index = override[override.length - 2] + pattern.length;
+    subject = OVERRIDE_SUBJECT(override);
+    var match = OVERRIDE_MATCH(override);
+    start_index = OVERRIDE_POS(override) + match.length;
   }
   return SubString(subject, start_index, subject.length);
 }
@@ -363,7 +363,9 @@
 function RegExpMakeCaptureGetter(n) {
   return function() {
     if (lastMatchInfoOverride) {
-      if (n < lastMatchInfoOverride.length - 2) return lastMatchInfoOverride[n];
+      if (n < lastMatchInfoOverride.length - 2) {
+        return OVERRIDE_CAPTURE(lastMatchInfoOverride, n);
+      }
       return '';
     }
     var index = n * 2;
diff --git a/src/runtime.cc b/src/runtime.cc
index 3c65d09..d8da56c 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -1289,90 +1289,76 @@
     // We have to declare a global const property. To capture the fact that
     // we only assign to it when evaluating the assignment for "const x =
     // <expr>", the initial value is the hole.
-    bool is_const_property = value->IsTheHole();
-    bool is_function_declaration = false;
-    if (value->IsUndefined() || is_const_property) {
+    bool is_var = value->IsUndefined();
+    bool is_const = value->IsTheHole();
+    bool is_function = value->IsSharedFunctionInfo();
+    bool is_module = value->IsJSModule();
+    ASSERT(is_var + is_const + is_function + is_module == 1);
+
+    if (is_var || is_const) {
       // Lookup the property in the global object, and don't set the
       // value of the variable if the property is already there.
+      // Do the lookup locally only, see ES5 errata.
       LookupResult lookup(isolate);
-      global->Lookup(*name, &lookup);
+      global->LocalLookup(*name, &lookup);
       if (lookup.IsProperty()) {
         // We found an existing property. Unless it was an interceptor
         // that claims the property is absent, skip this declaration.
-        if (lookup.type() != INTERCEPTOR) {
-          continue;
-        }
+        if (lookup.type() != INTERCEPTOR) continue;
         PropertyAttributes attributes = global->GetPropertyAttribute(*name);
-        if (attributes != ABSENT) {
-          continue;
-        }
+        if (attributes != ABSENT) continue;
         // Fall-through and introduce the absent property by using
         // SetProperty.
       }
-    } else {
-      is_function_declaration = true;
+    } else if (is_function) {
       // Copy the function and update its context. Use it as value.
       Handle<SharedFunctionInfo> shared =
           Handle<SharedFunctionInfo>::cast(value);
       Handle<JSFunction> function =
-          isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
-                                                                context,
-                                                                TENURED);
+          isolate->factory()->NewFunctionFromSharedFunctionInfo(
+              shared, context, TENURED);
       value = function;
     }
 
     LookupResult lookup(isolate);
     global->LocalLookup(*name, &lookup);
 
-    // Compute the property attributes. According to ECMA-262, section
-    // 13, page 71, the property must be read-only and
-    // non-deletable. However, neither SpiderMonkey nor KJS creates the
-    // property as read-only, so we don't either.
+    // Compute the property attributes. According to ECMA-262,
+    // the property must be non-configurable except in eval.
     int attr = NONE;
-    if (!DeclareGlobalsEvalFlag::decode(flags)) {
+    bool is_eval = DeclareGlobalsEvalFlag::decode(flags);
+    if (!is_eval || is_module) {
       attr |= DONT_DELETE;
     }
     bool is_native = DeclareGlobalsNativeFlag::decode(flags);
-    if (is_const_property || (is_native && is_function_declaration)) {
+    if (is_const || is_module || (is_native && is_function)) {
       attr |= READ_ONLY;
     }
 
     LanguageMode language_mode = DeclareGlobalsLanguageMode::decode(flags);
 
-    // Safari does not allow the invocation of callback setters for
-    // function declarations. To mimic this behavior, we do not allow
-    // the invocation of setters for function values. This makes a
-    // difference for global functions with the same names as event
-    // handlers such as "function onload() {}". Firefox does call the
-    // onload setter in those case and Safari does not. We follow
-    // Safari for compatibility.
-    if (is_function_declaration) {
-      if (lookup.IsProperty() && (lookup.type() != INTERCEPTOR)) {
-        // Do not overwrite READ_ONLY properties.
-        if (lookup.GetAttributes() & READ_ONLY) {
-          if (language_mode != CLASSIC_MODE) {
-            Handle<Object> args[] = { name };
-            return isolate->Throw(*isolate->factory()->NewTypeError(
-                "strict_cannot_assign", HandleVector(args, ARRAY_SIZE(args))));
-          }
-          continue;
+    if (!lookup.IsProperty() || is_function || is_module) {
+      // If the local property exists, check that we can reconfigure it
+      // as required for function declarations.
+      if (lookup.IsProperty() && lookup.IsDontDelete()) {
+        if (lookup.IsReadOnly() || lookup.IsDontEnum() ||
+            lookup.type() == CALLBACKS) {
+          return ThrowRedeclarationError(
+              isolate, is_function ? "function" : "module", name);
         }
-        // Do not change DONT_DELETE to false from true.
-        attr |= lookup.GetAttributes() & DONT_DELETE;
+        // If the existing property is not configurable, keep its attributes.
+        attr = lookup.GetAttributes();
       }
-      PropertyAttributes attributes = static_cast<PropertyAttributes>(attr);
-
-      RETURN_IF_EMPTY_HANDLE(
-          isolate,
-          JSObject::SetLocalPropertyIgnoreAttributes(global, name, value,
-                                                     attributes));
+      // Define or redefine own property.
+      RETURN_IF_EMPTY_HANDLE(isolate,
+          JSObject::SetLocalPropertyIgnoreAttributes(
+              global, name, value, static_cast<PropertyAttributes>(attr)));
     } else {
-      RETURN_IF_EMPTY_HANDLE(
-          isolate,
-          JSReceiver::SetProperty(global, name, value,
-                                  static_cast<PropertyAttributes>(attr),
-                                  language_mode == CLASSIC_MODE
-                                      ? kNonStrictMode : kStrictMode));
+      // Do a [[Put]] on the existing (own) property.
+      RETURN_IF_EMPTY_HANDLE(isolate,
+          JSObject::SetProperty(
+              global, name, value, static_cast<PropertyAttributes>(attr),
+              language_mode == CLASSIC_MODE ? kNonStrictMode : kStrictMode));
     }
   }
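
The local-only lookup above is what implements the ES5 erratum on global declarations. A minimal, hypothetical shell snippet (assuming "this" is the global object) of the observable difference:

  Object.prototype.foo = 42;     // inherited by the global object
  var foo;                       // no longer skipped: declares an own foo
  this.hasOwnProperty("foo");    // true with this change -- the declaration
                                 // shadows the inherited property, and foo
                                 // reads undefined instead of 42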
 
@@ -1405,6 +1391,8 @@
 
   if (attributes != ABSENT) {
     // The name was declared before; check for conflicting re-declarations.
+    // Note: this is actually inconsistent with what happens for globals (where
+    // we silently ignore such declarations).
     if (((attributes & READ_ONLY) != 0) || (mode == READ_ONLY)) {
       // Functions are not read-only.
       ASSERT(mode != READ_ONLY || initial_value->IsTheHole());
@@ -1467,9 +1455,14 @@
         return ThrowRedeclarationError(isolate, "const", name);
       }
     }
-    RETURN_IF_EMPTY_HANDLE(
-        isolate,
-        JSReceiver::SetProperty(object, name, value, mode, kNonStrictMode));
+    if (object->IsJSGlobalObject()) {
+      // Define own property on the global object.
+      RETURN_IF_EMPTY_HANDLE(isolate,
+         JSObject::SetLocalPropertyIgnoreAttributes(object, name, value, mode));
+    } else {
+      RETURN_IF_EMPTY_HANDLE(isolate,
+         JSReceiver::SetProperty(object, name, value, mode, kNonStrictMode));
+    }
   }
 
   return isolate->heap()->undefined_value();
@@ -2101,7 +2094,7 @@
     DescriptorArray* instance_desc = function->map()->instance_descriptors();
     int index = instance_desc->Search(name);
     ASSERT(index != DescriptorArray::kNotFound);
-    PropertyDetails details(instance_desc->GetDetails(index));
+    PropertyDetails details = instance_desc->GetDetails(index);
     CallbacksDescriptor new_desc(name,
         instance_desc->GetValue(index),
         static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
@@ -8605,6 +8598,25 @@
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_PushModuleContext) {
+  NoHandleAllocation ha;
+  ASSERT(args.length() == 2);
+  CONVERT_ARG_CHECKED(ScopeInfo, scope_info, 0);
+  CONVERT_ARG_HANDLE_CHECKED(JSModule, instance, 1);
+
+  Context* context;
+  MaybeObject* maybe_context =
+      isolate->heap()->AllocateModuleContext(isolate->context(),
+                                             scope_info);
+  if (!maybe_context->To(&context)) return maybe_context;
+  // Also initialize the context slot of the instance object.
+  instance->set_context(context);
+  isolate->set_context(context);
+
+  return context;
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteContextSlot) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 2);
diff --git a/src/runtime.h b/src/runtime.h
index fe9cfd9..b8918cd 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -323,6 +323,7 @@
   F(PushWithContext, 2, 1) \
   F(PushCatchContext, 3, 1) \
   F(PushBlockContext, 2, 1) \
+  F(PushModuleContext, 2, 1) \
   F(DeleteContextSlot, 2, 1) \
   F(LoadContextSlot, 2, 2) \
   F(LoadContextSlotNoReferenceError, 2, 2) \
diff --git a/src/scanner.cc b/src/scanner.cc
index 7901b5d..f24af2e 100755
--- a/src/scanner.cc
+++ b/src/scanner.cc
@@ -611,7 +611,7 @@
 }
 
 
-void Scanner::ScanEscape() {
+bool Scanner::ScanEscape() {
   uc32 c = c0_;
   Advance();
 
@@ -621,7 +621,7 @@
     if (IsCarriageReturn(c) && IsLineFeed(c0_)) Advance();
     // Allow LF+CR newlines in multiline string literals.
     if (IsLineFeed(c) && IsCarriageReturn(c0_)) Advance();
-    return;
+    return true;
   }
 
   switch (c) {
@@ -635,13 +635,13 @@
     case 't' : c = '\t'; break;
     case 'u' : {
       c = ScanHexNumber(4);
-      if (c < 0) c = 'u';
+      if (c < 0) return false;
       break;
     }
     case 'v' : c = '\v'; break;
     case 'x' : {
       c = ScanHexNumber(2);
-      if (c < 0) c = 'x';
+      if (c < 0) return false;
       break;
     }
     case '0' :  // fall through
@@ -654,10 +654,11 @@
     case '7' : c = ScanOctalEscape(c, 2); break;
   }
 
-  // According to ECMA-262, 3rd, 7.8.4 (p 18ff) these
-  // should be illegal, but they are commonly handled
-  // as non-escaped characters by JS VMs.
+  // According to ECMA-262, section 7.8.4, characters not covered by the
+  // above cases should be illegal, but they are commonly handled as
+  // non-escaped characters by JS VMs.
   AddLiteralChar(c);
+  return true;
 }
 
 
@@ -696,8 +697,7 @@
     uc32 c = c0_;
     Advance();
     if (c == '\\') {
-      if (c0_ < 0) return Token::ILLEGAL;
-      ScanEscape();
+      if (c0_ < 0 || !ScanEscape()) return Token::ILLEGAL;
     } else {
       AddLiteralChar(c);
     }
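(Illustrative embedder sketch, not part of the patch; it assumes the 3.10-era C++ API. Because ScanEscape() now returns false for malformed \x and \u sequences, the surrounding string token becomes Token::ILLEGAL and compilation fails with a SyntaxError instead of silently treating '\x' as 'x'.)

  #include <v8.h>

  int main() {
    v8::HandleScope handle_scope;
    v8::Persistent<v8::Context> context = v8::Context::New();
    v8::Context::Scope context_scope(context);
    v8::TryCatch try_catch;
    // '\x' is not followed by two hex digits, so ScanHexNumber(2) fails.
    v8::Local<v8::Script> script =
        v8::Script::Compile(v8::String::New("var s = '\\x';"));
    bool is_syntax_error = script.IsEmpty() && try_catch.HasCaught();
    context.Dispose();
    return is_syntax_error ? 0 : 1;
  }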
diff --git a/src/scanner.h b/src/scanner.h
index 045e7d2..4de413b 100644
--- a/src/scanner.h
+++ b/src/scanner.h
@@ -520,13 +520,16 @@
   Token::Value ScanIdentifierOrKeyword();
   Token::Value ScanIdentifierSuffix(LiteralScope* literal);
 
-  void ScanEscape();
   Token::Value ScanString();
 
-  // Decodes a unicode escape-sequence which is part of an identifier.
+  // Scans an escape-sequence which is part of a string and adds the
+  // decoded character to the current literal. Returns true if the escape
+  // sequence was valid, false otherwise.
+  bool ScanEscape();
+  // Decodes a Unicode escape-sequence which is part of an identifier.
   // If the escape sequence cannot be decoded the result is kBadChar.
   uc32 ScanIdentifierUnicodeEscape();
-  // Recognizes a uniocde escape-sequence and adds its characters,
+  // Scans a Unicode escape-sequence and adds its characters,
   // uninterpreted, to the current literal. Used for parsing RegExp
   // flags.
   bool ScanLiteralUnicodeEscape();
diff --git a/src/scopeinfo.cc b/src/scopeinfo.cc
index 0f36234..f50af30 100644
--- a/src/scopeinfo.cc
+++ b/src/scopeinfo.cc
@@ -53,7 +53,7 @@
   FunctionVariableInfo function_name_info;
   VariableMode function_variable_mode;
   if (scope->is_function_scope() && scope->function() != NULL) {
-    Variable* var = scope->function()->var();
+    Variable* var = scope->function()->proxy()->var();
     if (!var->is_used()) {
       function_name_info = UNUSED;
     } else if (var->IsContextSlot()) {
@@ -129,8 +129,8 @@
   // If present, add the function variable name and its index.
   ASSERT(index == scope_info->FunctionNameEntryIndex());
   if (has_function_name) {
-    int var_index = scope->function()->var()->index();
-    scope_info->set(index++, *scope->function()->name());
+    int var_index = scope->function()->proxy()->var()->index();
+    scope_info->set(index++, *scope->function()->proxy()->name());
     scope_info->set(index++, Smi::FromInt(var_index));
     ASSERT(function_name_info != STACK ||
            (var_index == scope_info->StackLocalCount() &&
@@ -142,7 +142,9 @@
   ASSERT(index == scope_info->length());
   ASSERT(scope->num_parameters() == scope_info->ParameterCount());
   ASSERT(scope->num_stack_slots() == scope_info->StackSlotCount());
-  ASSERT(scope->num_heap_slots() == scope_info->ContextLength());
+  ASSERT(scope->num_heap_slots() == scope_info->ContextLength() ||
+         (scope->num_heap_slots() == kVariablePartIndex &&
+          scope_info->ContextLength() == 0));
   return scope_info;
 }
 
diff --git a/src/scopes.cc b/src/scopes.cc
index 1e35e48..6f6032a 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -415,14 +415,20 @@
 
 Variable* Scope::LookupFunctionVar(Handle<String> name,
                                    AstNodeFactory<AstNullVisitor>* factory) {
-  if (function_ != NULL && function_->name().is_identical_to(name)) {
-    return function_->var();
+  if (function_ != NULL && function_->proxy()->name().is_identical_to(name)) {
+    return function_->proxy()->var();
   } else if (!scope_info_.is_null()) {
     // If we are backed by a scope info, try to lookup the variable there.
     VariableMode mode;
     int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
     if (index < 0) return NULL;
-    Variable* var = DeclareFunctionVar(name, mode, factory);
+    Variable* var = new Variable(
+        this, name, mode, true /* is valid LHS */,
+        Variable::NORMAL, kCreatedInitialized);
+    VariableProxy* proxy = factory->NewVariableProxy(var);
+    VariableDeclaration* declaration =
+        factory->NewVariableDeclaration(proxy, mode, this);
+    DeclareFunctionVar(declaration);
     var->AllocateTo(Variable::CONTEXT, index);
     return var;
   } else {
@@ -794,7 +800,7 @@
   // Function name, if any (named function literals, only).
   if (function_ != NULL) {
     Indent(n1, "// (local) function name: ");
-    PrintName(function_->name());
+    PrintName(function_->proxy()->name());
     PrintF("\n");
   }
 
@@ -827,7 +833,7 @@
   // Print locals.
   Indent(n1, "// function var\n");
   if (function_ != NULL) {
-    PrintVar(n1, function_->var());
+    PrintVar(n1, function_->proxy()->var());
   }
 
   Indent(n1, "// temporary vars\n");
@@ -1093,7 +1099,7 @@
   // Exceptions: temporary variables are never allocated in a context;
   // catch-bound variables are always allocated in a context.
   if (var->mode() == TEMPORARY) return false;
-  if (is_catch_scope() || is_block_scope()) return true;
+  if (is_catch_scope() || is_block_scope() || is_module_scope()) return true;
   return var->has_forced_context_allocation() ||
       scope_calls_eval_ ||
       inner_scope_calls_eval_ ||
@@ -1211,7 +1217,7 @@
   // because of the current ScopeInfo implementation (see
   // ScopeInfo::ScopeInfo(FunctionScope* scope) constructor).
   if (function_ != NULL) {
-    AllocateNonParameterLocal(function_->var());
+    AllocateNonParameterLocal(function_->proxy()->var());
   }
 }
 
@@ -1237,7 +1243,8 @@
   // Force allocation of a context for this scope if necessary. For a 'with'
   // scope and for a function scope that makes an 'eval' call we need a context,
   // even if no local variables were statically allocated in the scope.
-  bool must_have_context = is_with_scope() ||
+  // Likewise for modules.
+  bool must_have_context = is_with_scope() || is_module_scope() ||
       (is_function_scope() && calls_eval());
 
   // If we didn't allocate any locals in the local context, then we only
@@ -1253,14 +1260,14 @@
 
 int Scope::StackLocalCount() const {
   return num_stack_slots() -
-      (function_ != NULL && function_->var()->IsStackLocal() ? 1 : 0);
+      (function_ != NULL && function_->proxy()->var()->IsStackLocal() ? 1 : 0);
 }
 
 
 int Scope::ContextLocalCount() const {
   if (num_heap_slots() == 0) return 0;
   return num_heap_slots() - Context::MIN_CONTEXT_SLOTS -
-      (function_ != NULL && function_->var()->IsContextSlot() ? 1 : 0);
+      (function_ != NULL && function_->proxy()->var()->IsContextSlot() ? 1 : 0);
 }
 
 } }  // namespace v8::internal
diff --git a/src/scopes.h b/src/scopes.h
index d315b7e..e1a658a 100644
--- a/src/scopes.h
+++ b/src/scopes.h
@@ -126,15 +126,9 @@
   // Declare the function variable for a function literal. This variable
   // is in an intermediate scope between this function scope and the
   // outer scope. Only possible for function scopes; at most one variable.
-  template<class Visitor>
-  Variable* DeclareFunctionVar(Handle<String> name,
-                               VariableMode mode,
-                               AstNodeFactory<Visitor>* factory) {
-    ASSERT(is_function_scope() && function_ == NULL);
-    Variable* function_var = new Variable(
-        this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
-    function_ = factory->NewVariableProxy(function_var);
-    return function_var;
+  void DeclareFunctionVar(VariableDeclaration* declaration) {
+    ASSERT(is_function_scope());
+    function_ = declaration;
   }
 
   // Declare a parameter in this scope.  When there are duplicated
@@ -312,9 +306,8 @@
   Variable* receiver() { return receiver_; }
 
   // The variable holding the function literal for named function
-  // literals, or NULL.
-  // Only valid for function scopes.
-  VariableProxy* function() const {
+  // literals, or NULL.  Only valid for function scopes.
+  VariableDeclaration* function() const {
     ASSERT(is_function_scope());
     return function_;
   }
@@ -446,7 +439,7 @@
   // Convenience variable.
   Variable* receiver_;
   // Function variable, if any; function scopes only.
-  VariableProxy* function_;
+  VariableDeclaration* function_;
   // Convenience variable; function scopes only.
   Variable* arguments_;
   // Interface; module scopes only.
diff --git a/src/v8.cc b/src/v8.cc
index 45036c8..2910a07 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -118,6 +118,8 @@
 
   delete call_completed_callbacks_;
   call_completed_callbacks_ = NULL;
+
+  OS::TearDown();
 }
 
 
@@ -248,7 +250,6 @@
 }
 
 void V8::InitializeOncePerProcessImpl() {
-  // Set up the platform OS support.
   OS::SetUp();
 
   use_crankshaft_ = FLAG_crankshaft;
diff --git a/src/v8globals.h b/src/v8globals.h
index bfc5e23..6a1766a 100644
--- a/src/v8globals.h
+++ b/src/v8globals.h
@@ -48,6 +48,10 @@
 const intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
 const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
 
+// Desired alignment for double values.
+const intptr_t kDoubleAlignment = 8;
+const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
+
 // Desired alignment for maps.
 #if V8_HOST_ARCH_64_BIT
 const intptr_t kMapAlignmentBits = kObjectAlignmentBits;
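(For reference, and not taken from the patch: mask constants like this are normally consumed by rounding an address up to the alignment, which works because kDoubleAlignment is a power of two.)

  // Round 'value' up to the next kDoubleAlignment (8-byte) boundary.
  static inline intptr_t AlignToDoubleBoundary(intptr_t value) {
    return (value + kDoubleAlignmentMask) & ~kDoubleAlignmentMask;
  }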
diff --git a/src/version.cc b/src/version.cc
index d193735..567d244 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,8 +34,8 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     10
-#define BUILD_NUMBER      2
-#define PATCH_LEVEL       1
+#define BUILD_NUMBER      3
+#define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 2845039..ce9067c 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -5112,56 +5112,24 @@
   // rax: string
   // rbx: instance type
   // Calculate length of sub string using the smi values.
-  Label result_longer_than_two;
   __ movq(rcx, Operand(rsp, kToOffset));
   __ movq(rdx, Operand(rsp, kFromOffset));
   __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
 
   __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
-  __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
+  __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset));
   Label not_original_string;
-  __ j(not_equal, &not_original_string, Label::kNear);
+  // Shorter than original string's length: an actual substring.
+  __ j(below, &not_original_string, Label::kNear);
+  // Longer than original string's length or negative: unsafe arguments.
+  __ j(above, &runtime);
+  // Return original string.
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1);
   __ ret(kArgumentsSize);
   __ bind(&not_original_string);
-  // Special handling of sub-strings of length 1 and 2. One character strings
-  // are handled in the runtime system (looked up in the single character
-  // cache). Two character strings are looked for in the symbol cache.
   __ SmiToInteger32(rcx, rcx);
-  __ cmpl(rcx, Immediate(2));
-  __ j(greater, &result_longer_than_two);
-  __ j(less, &runtime);
 
-  // Sub string of length 2 requested.
-  // rax: string
-  // rbx: instance type
-  // rcx: sub string length (value is 2)
-  // rdx: from index (smi)
-  __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);
-
-  // Get the two characters forming the sub string.
-  __ SmiToInteger32(rdx, rdx);  // From index is no longer smi.
-  __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
-  __ movzxbq(rdi,
-             FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));
-
-  // Try to lookup two character string in symbol table.
-  Label make_two_character_string;
-  StringHelper::GenerateTwoCharacterSymbolTableProbe(
-      masm, rbx, rdi, r9, r11, r14, r15, &make_two_character_string);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-
-  __ bind(&make_two_character_string);
-  // Set up registers for allocating the two character string.
-  __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
-  __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
-  __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-
-  __ bind(&result_longer_than_two);
   // rax: string
   // rbx: instance type
   // rcx: sub string length
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 4138a16..e0000f8 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -257,11 +257,11 @@
       // For named function expressions, declare the function name as a
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
-        VariableProxy* proxy = scope()->function();
-        ASSERT(proxy->var()->mode() == CONST ||
-               proxy->var()->mode() == CONST_HARMONY);
-        ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
-        EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+        VariableDeclaration* function = scope()->function();
+        ASSERT(function->proxy()->var()->mode() == CONST ||
+               function->proxy()->var()->mode() == CONST_HARMONY);
+        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+        VisitVariableDeclaration(function);
       }
       VisitDeclarations(scope()->declarations());
     }
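(Small illustration of what the constant function-name binding above provides; it is not part of the patch and reuses the embedder setup from the earlier sketches. Inside a named function expression the name resolves, read-only, to the closure itself, which is what the CONST/CONST_HARMONY assertion reflects.)

  // Assuming an entered v8::Context and a live HandleScope, as above.
  v8::Local<v8::Value> result = v8::Script::Compile(v8::String::New(
      "(function fac(n) { return n < 2 ? 1 : n * fac(n - 1); })(5)"))->Run();
  // result->Int32Value() is 120: 'fac' is visible only inside the literal,
  // via the function-name variable declared through VisitVariableDeclaration.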
@@ -753,61 +753,51 @@
 }
 
 
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
-                                        VariableMode mode,
-                                        FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+  // The variable in the declaration always resides in the current function
+  // context.
+  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+  if (FLAG_debug_code) {
+    // Check that we're not inside a with or catch context.
+    __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
+    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
+    __ Check(not_equal, "Declaration in with context.");
+    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
+    __ Check(not_equal, "Declaration in catch context.");
+  }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+    VariableDeclaration* declaration) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
+  VariableProxy* proxy = declaration->proxy();
+  VariableMode mode = declaration->mode();
   Variable* variable = proxy->var();
-  bool binding_needs_init = (function == NULL) &&
-      (mode == CONST || mode == CONST_HARMONY || mode == LET);
+  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++global_count_;
+      globals_->Add(variable->name());
+      globals_->Add(variable->binding_needs_init()
+                        ? isolate()->factory()->the_hole_value()
+                        : isolate()->factory()->undefined_value());
       break;
 
     case Variable::PARAMETER:
     case Variable::LOCAL:
-      if (function != NULL) {
-        Comment cmnt(masm_, "[ Declaration");
-        VisitForAccumulatorValue(function);
-        __ movq(StackOperand(variable), result_register());
-      } else if (binding_needs_init) {
-        Comment cmnt(masm_, "[ Declaration");
+      if (hole_init) {
+        Comment cmnt(masm_, "[ VariableDeclaration");
         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
         __ movq(StackOperand(variable), kScratchRegister);
       }
       break;
 
     case Variable::CONTEXT:
-      // The variable in the decl always resides in the current function
-      // context.
-      ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
-      if (FLAG_debug_code) {
-        // Check that we're not inside a with or catch context.
-        __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
-        __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
-        __ Check(not_equal, "Declaration in with context.");
-        __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
-        __ Check(not_equal, "Declaration in catch context.");
-      }
-      if (function != NULL) {
-        Comment cmnt(masm_, "[ Declaration");
-        VisitForAccumulatorValue(function);
-        __ movq(ContextOperand(rsi, variable->index()), result_register());
-        int offset = Context::SlotOffset(variable->index());
-        // We know that we have written a function, which is not a smi.
-        __ RecordWriteContextSlot(rsi,
-                                  offset,
-                                  result_register(),
-                                  rcx,
-                                  kDontSaveFPRegs,
-                                  EMIT_REMEMBERED_SET,
-                                  OMIT_SMI_CHECK);
-        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (binding_needs_init) {
-        Comment cmnt(masm_, "[ Declaration");
+      if (hole_init) {
+        Comment cmnt(masm_, "[ VariableDeclaration");
+        EmitDebugCheckDeclarationContext(variable);
         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
         __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
         // No write barrier since the hole value is in old space.
@@ -816,14 +806,12 @@
       break;
 
     case Variable::LOOKUP: {
-      Comment cmnt(masm_, "[ Declaration");
+      Comment cmnt(masm_, "[ VariableDeclaration");
       __ push(rsi);
       __ Push(variable->name());
       // Declaration nodes are always introduced in one of four modes.
-      ASSERT(mode == VAR ||
-             mode == CONST ||
-             mode == CONST_HARMONY ||
-             mode == LET);
+      ASSERT(mode == VAR || mode == LET ||
+             mode == CONST || mode == CONST_HARMONY);
       PropertyAttributes attr =
           (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE;
       __ Push(Smi::FromInt(attr));
@@ -831,9 +819,7 @@
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
-      if (function != NULL) {
-        VisitForStackValue(function);
-      } else if (binding_needs_init) {
+      if (hole_init) {
         __ PushRoot(Heap::kTheHoleValueRootIndex);
       } else {
         __ Push(Smi::FromInt(0));  // Indicates no initial value.
@@ -845,6 +831,119 @@
 }
 
 
+void FullCodeGenerator::VisitFunctionDeclaration(
+    FunctionDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      globals_->Add(variable->name());
+      Handle<SharedFunctionInfo> function =
+          Compiler::BuildFunctionInfo(declaration->fun(), script());
+      // Check for stack-overflow exception.
+      if (function.is_null()) return SetStackOverflow();
+      globals_->Add(function);
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      VisitForAccumulatorValue(declaration->fun());
+      __ movq(StackOperand(variable), result_register());
+      break;
+    }
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      VisitForAccumulatorValue(declaration->fun());
+      __ movq(ContextOperand(rsi, variable->index()), result_register());
+      int offset = Context::SlotOffset(variable->index());
+      // We know that we have written a function, which is not a smi.
+      __ RecordWriteContextSlot(rsi,
+                                offset,
+                                result_register(),
+                                rcx,
+                                kDontSaveFPRegs,
+                                EMIT_REMEMBERED_SET,
+                                OMIT_SMI_CHECK);
+      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+      break;
+    }
+
+    case Variable::LOOKUP: {
+      Comment cmnt(masm_, "[ FunctionDeclaration");
+      __ push(rsi);
+      __ Push(variable->name());
+      __ Push(Smi::FromInt(NONE));
+      VisitForStackValue(declaration->fun());
+      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+      break;
+    }
+  }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  Handle<JSModule> instance = declaration->module()->interface()->Instance();
+  ASSERT(!instance.is_null());
+
+  switch (variable->location()) {
+    case Variable::UNALLOCATED: {
+      Comment cmnt(masm_, "[ ModuleDeclaration");
+      globals_->Add(variable->name());
+      globals_->Add(instance);
+      Visit(declaration->module());
+      break;
+    }
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ ModuleDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      __ Move(ContextOperand(rsi, variable->index()), instance);
+      Visit(declaration->module());
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL:
+    case Variable::LOOKUP:
+      UNREACHABLE();
+  }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+  VariableProxy* proxy = declaration->proxy();
+  Variable* variable = proxy->var();
+  switch (variable->location()) {
+    case Variable::UNALLOCATED:
+      // TODO(rossberg)
+      break;
+
+    case Variable::CONTEXT: {
+      Comment cmnt(masm_, "[ ImportDeclaration");
+      EmitDebugCheckDeclarationContext(variable);
+      // TODO(rossberg)
+      break;
+    }
+
+    case Variable::PARAMETER:
+    case Variable::LOCAL:
+    case Variable::LOOKUP:
+      UNREACHABLE();
+  }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+  // TODO(rossberg)
+}
+
+
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   __ push(rsi);  // The context is the first argument.
@@ -4412,7 +4511,8 @@
 
 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
   Scope* declaration_scope = scope()->DeclarationScope();
-  if (declaration_scope->is_global_scope()) {
+  if (declaration_scope->is_global_scope() ||
+      declaration_scope->is_module_scope()) {
     // Contexts nested in the global context have a canonical empty function
     // as their closure, not the anonymous closure containing the global
     // code.  Pass a smi sentinel and let the runtime look up the empty
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 28c5304..c4f12f0 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -4280,9 +4280,10 @@
         __ movq(FieldOperand(result, total_offset), rcx);
       }
     } else if (elements->IsFixedArray()) {
+      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
       for (int i = 0; i < elements_length; i++) {
         int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
-        Handle<Object> value = JSObject::GetElement(object, i);
+        Handle<Object> value(fast_elements->get(i));
         if (value->IsJSObject()) {
           Handle<JSObject> value_object = Handle<JSObject>::cast(value);
           __ lea(rcx, Operand(result, *offset));
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index f7db250..53becf6 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -150,6 +150,20 @@
 }
 
 
+void MacroAssembler::PushAddress(ExternalReference source) {
+  int64_t address = reinterpret_cast<int64_t>(source.address());
+  if (is_int32(address) && !Serializer::enabled()) {
+    if (emit_debug_code()) {
+      movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
+    }
+    push(Immediate(static_cast<int32_t>(address)));
+    return;
+  }
+  LoadAddress(kScratchRegister, source);
+  push(kScratchRegister);
+}
+
+
 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
   ASSERT(root_array_available_);
   movq(destination, Operand(kRootRegister,
@@ -657,7 +671,7 @@
 
 
 void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
-#ifdef _WIN64
+#if defined(_WIN64) && !defined(__MINGW64__)
   // We need to prepare a slot for result handle on stack and put
   // a pointer to it into 1st arg register.
   EnterApiExitFrame(arg_stack_space + 1);
@@ -705,7 +719,7 @@
        RelocInfo::RUNTIME_ENTRY);
   call(rax);
 
-#ifdef _WIN64
+#if defined(_WIN64) && !defined(__MINGW64__)
   // rax keeps a pointer to v8::Handle, unpack it.
   movq(rax, Operand(rax, 0));
 #endif
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 6bb5cfe..66587d5 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -127,6 +127,8 @@
   // Returns the size of the code generated by LoadAddress.
   // Used by CallSize(ExternalReference) to find the size of a call.
   int LoadAddressSize(ExternalReference source);
+  // Pushes the address of the external reference onto the stack.
+  void PushAddress(ExternalReference source);
 
   // Operations on roots in the root-array.
   void LoadRoot(Register destination, Heap::RootListIndex index);
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 86b4aff..9312c65 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -379,8 +379,7 @@
   __ push(receiver);
   __ push(holder);
   __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
-  __ movq(kScratchRegister, ExternalReference::isolate_address());
-  __ push(kScratchRegister);
+  __ PushAddress(ExternalReference::isolate_address());
 }
 
 
@@ -482,7 +481,9 @@
   // Prepare arguments.
   __ lea(rbx, Operand(rsp, 4 * kPointerSize));
 
-#ifdef _WIN64
+#if defined(__MINGW64__)
+  Register arguments_arg = rcx;
+#elif defined(_WIN64)
   // Win64 uses first register--rcx--for returned value.
   Register arguments_arg = rdx;
 #else
@@ -1010,13 +1011,15 @@
   } else {
     __ Push(Handle<Object>(callback->data()));
   }
-  __ movq(kScratchRegister, ExternalReference::isolate_address());
-  __ push(kScratchRegister);  // isolate
+  __ PushAddress(ExternalReference::isolate_address());  // isolate
   __ push(name_reg);  // name
   // Save a pointer to where we pushed the arguments pointer.
   // This will be passed as the const AccessorInfo& to the C++ callback.
 
-#ifdef _WIN64
+#if defined(__MINGW64__)
+  Register accessor_info_arg = rdx;
+  Register name_arg = rcx;
+#elif defined(_WIN64)
   // Win64 uses first register--rcx--for returned value.
   Register accessor_info_arg = r8;
   Register name_arg = rdx;
@@ -1186,8 +1189,7 @@
       __ push(holder_reg);
       __ Move(holder_reg, callback);
       __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
-      __ movq(kScratchRegister, ExternalReference::isolate_address());
-      __ push(kScratchRegister);
+      __ PushAddress(ExternalReference::isolate_address());
       __ push(holder_reg);
       __ push(name_reg);
       __ push(scratch2);  // restore return address