Version 3.5.10

Added a dependency of v8_base on the WinSock2 Windows library in the GYP build.

Various bugfixes.

git-svn-id: http://v8.googlecode.com/svn/trunk@9077 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index 2deff90..02d0ebb 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,11 @@
+2011-08-31: Version 3.5.10
+
+        Added a dependency of v8_base on the WinSock2 Windows library
+        in the GYP build.
+
+        Various bugfixes.
+
+
 2011-08-29: Version 3.5.9
 
         Made FromPropertyDescriptor not trigger inherited setters.
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 89df079..0ec3692 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -692,11 +692,11 @@
 void Assembler::next(Label* L) {
   ASSERT(L->is_linked());
   int link = target_at(L->pos());
-  if (link > 0) {
-    L->link_to(link);
-  } else {
-    ASSERT(link == kEndOfChain);
+  if (link == kEndOfChain) {
     L->Unuse();
+  } else {
+    ASSERT(link >= 0);
+    L->link_to(link);
   }
 }
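
Note: in Assembler::next() above, target_at() walks the chain of unresolved uses of a label that is threaded through the code and terminated by the kEndOfChain sentinel. The rewritten branch order handles the sentinel explicitly and then only requires the remaining links to be non-negative, so a link at offset 0 (which the old ASSERT(link == kEndOfChain) path rejected) is treated like any other position. Below is a minimal standalone sketch of the same traversal; ToyAssembler, the chain layout and the sentinel value are invented here for illustration.

#include <cassert>
#include <cstdio>
#include <vector>

const int kEndOfChain = -4;  // Illustrative sentinel; any impossible offset works.

// Toy stand-in for the assembler: chain[pos] holds the next link for the
// branch recorded at offset pos, or kEndOfChain if it is the last one.
struct ToyAssembler {
  std::vector<int> chain;

  int target_at(int pos) const { return chain[pos]; }

  // Mirrors the fixed Assembler::next(): test the sentinel first, then
  // require a non-negative offset (offset 0 is a valid link).
  void next(int* label_pos, bool* linked) const {
    assert(*linked);
    int link = target_at(*label_pos);
    if (link == kEndOfChain) {
      *linked = false;       // Label::Unuse()
    } else {
      assert(link >= 0);
      *label_pos = link;     // Label::link_to(link)
    }
  }
};

int main() {
  ToyAssembler masm;
  masm.chain.assign(9, kEndOfChain);
  masm.chain[8] = 4;  // branch at offset 8 links to the branch at offset 4
  masm.chain[4] = 0;  // branch at offset 4 links to the branch at offset 0
                      // chain[0] stays kEndOfChain: offset 0 ends the chain

  int pos = 8;
  bool linked = true;
  while (linked) {
    std::printf("unresolved branch at offset %d\n", pos);
    masm.next(&pos, &linked);
  }
  return 0;
}
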
 
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 328102b..a35380c 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -138,7 +138,7 @@
   __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
 
   // Clear the heap tag on the elements array.
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ sub(scratch1, scratch1, Operand(kHeapObjectTag));
 
   // Initialize the FixedArray and fill it with holes. FixedArray length is
@@ -207,7 +207,7 @@
   // Allocate the JSArray object together with space for a FixedArray with the
   // requested number of elements.
   __ bind(&not_empty);
-  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ mov(elements_array_end,
          Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
   __ add(elements_array_end,
@@ -243,7 +243,7 @@
          FieldMemOperand(result, JSArray::kElementsOffset));
 
   // Clear the heap tag on the elements array.
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ sub(elements_array_storage,
          elements_array_storage,
          Operand(kHeapObjectTag));
@@ -255,7 +255,7 @@
   __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
   ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
   __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ tst(array_size, array_size);
   // Length of the FixedArray is the number of pre-allocated elements if
   // the actual JSArray has length 0 and the size of the JSArray for non-empty
@@ -272,7 +272,7 @@
   // result: JSObject
   // elements_array_storage: elements array element storage
   // array_size: smi-tagged size of elements array
-  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   __ add(elements_array_end,
          elements_array_storage,
          Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -337,14 +337,14 @@
   __ bind(&argc_one_or_more);
   __ cmp(r0, Operand(1));
   __ b(ne, &argc_two_or_more);
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
   __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
   __ b(ne, call_generic_code);
 
   // Handle construction of an empty array of a certain size. Bail out if size
   // is too large to actually allocate an elements array.
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
   __ b(ge, call_generic_code);
 
@@ -571,7 +571,7 @@
   // Is it a String?
   __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
   __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
-  ASSERT(kNotStringTag != 0);
+  STATIC_ASSERT(kNotStringTag != 0);
   __ tst(r3, Operand(kIsNotStringMask));
   __ b(ne, &convert_argument);
   __ mov(argument, r0);
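
Note: the ASSERT-to-STATIC_ASSERT conversions in this file (and in several other files below) turn debug-only runtime checks on compile-time constants such as kSmiTag and kSmiTagSize into compile-time failures, using the sizeof-of-an-incomplete-specialization trick that checks.h already provides (see the STATIC_CHECK hunk further down). Below is a self-contained sketch of that mechanism; the macro name and constant values here are illustrative, not the real V8 definitions.

// Minimal re-creation of the pre-C++11 compile-time assert used by checks.h.
// StaticAssertion<true> is complete and StaticAssertion<false> is not, so
// sizeof(StaticAssertion<condition>) only compiles when condition is true.
template <bool> class StaticAssertion;      // intentionally left incomplete
template <> class StaticAssertion<true> {};
template <int> class StaticAssertionHelper {};

#define SA_JOIN_(a, b) a##b
#define SA_JOIN(a, b) SA_JOIN_(a, b)
#define MY_STATIC_ASSERT(test)                                               \
  typedef StaticAssertionHelper<                                             \
      sizeof(StaticAssertion<static_cast<bool>((test))>)>                    \
      SA_JOIN(StaticAssertTypedef_, __LINE__)

// Tag constants as used throughout the ports; values shown for illustration.
const int kSmiTag = 0;
const int kSmiTagSize = 1;

MY_STATIC_ASSERT(kSmiTag == 0);                      // checked by the compiler
MY_STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
// MY_STATIC_ASSERT(kSmiTag == 1);                   // would not compile

int main() { return 0; }
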
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index ffe32bc..09d2c17 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -4389,8 +4389,8 @@
   // a sequential string or an external string.
   // In the case of a sliced string its offset has to be taken into account.
   Label cons_string, check_encoding;
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ cmp(r1, Operand(kExternalStringTag));
   __ b(lt, &cons_string);
   __ b(eq, &runtime);
@@ -4857,8 +4857,8 @@
 
   // Handle non-flat strings.
   __ and_(result_, result_, Operand(kStringRepresentationMask));
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ cmp(result_, Operand(kExternalStringTag));
   __ b(gt, &sliced_string);
   __ b(eq, &call_runtime_);
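
Note: dropping the redundant parentheses in these STATIC_ASSERTs is cosmetic, but the asserts record the invariant the surrounding dispatch relies on: the cons tag sorts below kExternalStringTag and the sliced tag sorts above it, so a single compare splits the cases into lt (cons), eq (external, handled in the runtime) and gt (sliced) branches. Below is a small sketch of that three-way dispatch in plain C++; the concrete tag values are illustrative, only their ordering matters.

#include <cassert>
#include <cstdio>

// Ordering matches the STATIC_ASSERTs above: cons < external < sliced.
const int kConsStringTag = 0x1;
const int kExternalStringTag = 0x2;
const int kSlicedStringTag = 0x3;

// Same shape as the generated code: one compare, then lt / eq / gt branches.
const char* Dispatch(int representation_tag) {
  if (representation_tag < kExternalStringTag) return "cons string path";
  if (representation_tag == kExternalStringTag) return "runtime (external)";
  return "sliced string path";
}

int main() {
  assert(kConsStringTag < kExternalStringTag);
  assert(kSlicedStringTag > kExternalStringTag);
  std::printf("%s\n", Dispatch(kConsStringTag));
  std::printf("%s\n", Dispatch(kExternalStringTag));
  std::printf("%s\n", Dispatch(kSlicedStringTag));
  return 0;
}
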
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index b58743d..408946e 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -697,12 +697,12 @@
   switch (slot->type()) {
     case Slot::PARAMETER:
     case Slot::LOCAL:
-      if (mode == Variable::CONST) {
-        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-        __ str(ip, MemOperand(fp, SlotOffset(slot)));
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForAccumulatorValue(function);
         __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ str(ip, MemOperand(fp, SlotOffset(slot)));
       }
       break;
 
@@ -721,17 +721,17 @@
         __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
         __ Check(ne, "Declaration in catch context.");
       }
-      if (mode == Variable::CONST) {
-        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-        __ str(ip, ContextOperand(cp, slot->index()));
-        // No write barrier since the_hole_value is in old space.
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForAccumulatorValue(function);
         __ str(result_register(), ContextOperand(cp, slot->index()));
         int offset = Context::SlotOffset(slot->index());
         // We know that we have written a function, which is not a smi.
         __ mov(r1, Operand(cp));
         __ RecordWrite(r1, Operand(offset), r2, result_register());
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ str(ip, ContextOperand(cp, slot->index()));
+        // No write barrier since the_hole_value is in old space.
       }
       break;
 
@@ -747,13 +747,13 @@
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
-      if (mode == Variable::CONST) {
-        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
-        __ Push(cp, r2, r1, r0);
-      } else if (function != NULL) {
+      if (function != NULL) {
         __ Push(cp, r2, r1);
         // Push initial value for function declaration.
         VisitForStackValue(function);
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+        __ Push(cp, r2, r1, r0);
       } else {
         __ mov(r0, Operand(Smi::FromInt(0)));  // No initial value!
         __ Push(cp, r2, r1, r0);
@@ -1279,6 +1279,20 @@
       __ cmp(r0, ip);
       __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
       context()->Plug(r0);
+    } else if (var->mode() == Variable::LET) {
+      // Let bindings may be the hole value if they have not been initialized.
+      // Throw a reference error in this case.
+      Label done;
+      MemOperand slot_operand = EmitSlotSearch(slot, r0);
+      __ ldr(r0, slot_operand);
+      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+      __ cmp(r0, ip);
+      __ b(ne, &done);
+      __ mov(r0, Operand(var->name()));
+      __ push(r0);
+      __ CallRuntime(Runtime::kThrowReferenceError, 1);
+      __ bind(&done);
+      context()->Plug(r0);
     } else {
       context()->Plug(slot);
     }
@@ -1859,6 +1873,59 @@
     }
     __ bind(&skip);
 
+  } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+    // Perform the assignment for let variables, checking first that the
+    // binding has been initialized (otherwise a reference error is thrown).
+    Slot* slot = var->AsSlot();
+    switch (slot->type()) {
+      case Slot::PARAMETER:
+      case Slot::LOCAL: {
+        Label assign;
+        // Check for an initialized let binding.
+        __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ cmp(r1, ip);
+        __ b(ne, &assign);
+        __ mov(r1, Operand(var->name()));
+        __ push(r1);
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+        // Perform the assignment.
+        __ bind(&assign);
+        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+        break;
+      }
+      case Slot::CONTEXT: {
+        // Let variables may be the hole value if they have not been
+        // initialized. Throw a reference error in this case.
+        Label assign;
+        MemOperand target = EmitSlotSearch(slot, r1);
+        // Check for an initialized let binding.
+        __ ldr(r3, target);
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ cmp(r3, ip);
+        __ b(ne, &assign);
+        __ mov(r3, Operand(var->name()));
+        __ push(r3);
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+        // Perform the assignment.
+        __ bind(&assign);
+        __ str(result_register(), target);
+        // RecordWrite may destroy all its register arguments.
+        __ mov(r3, result_register());
+        int offset = Context::SlotOffset(slot->index());
+        __ RecordWrite(r1, Operand(offset), r2, r3);
+        break;
+      }
+      case Slot::LOOKUP:
+        // Call the runtime for the assignment.
+        __ push(r0);  // Value.
+        __ mov(r1, Operand(slot->var()->name()));
+        __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+        __ Push(cp, r1, r0);  // Context, name, strict mode.
+        __ CallRuntime(Runtime::kStoreContextSlot, 4);
+        break;
+    }
+
   } else if (var->mode() != Variable::CONST) {
     // Perform the assignment for non-const variables.  Const assignments
     // are simply skipped.
@@ -3194,7 +3261,7 @@
 
   Label done, not_found;
   // tmp now holds finger offset as a smi.
-  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
   // r2 now holds finger offset as a smi.
   __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4182,7 +4249,7 @@
   // Cook return address in link register to stack (smi encoded Code* delta)
   __ sub(r1, lr, Operand(masm_->CodeObject()));
   ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   __ add(r1, r1, Operand(r1));  // Convert to smi.
   __ push(r1);
 }
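
Note: the new Variable::LET paths above give 'let' bindings their checked-initialization semantics in the full code generator: the declaration stores the hole value into the slot, and every load or ordinary assignment compares the slot against the hole first, calling Runtime::kThrowReferenceError if the binding is still uninitialized; only Token::INIT_LET stores bypass the check. Below is a minimal sketch of the same rule in plain C++; TheHole, LetSlot and ReferenceError are illustrative stand-ins for the V8 runtime machinery.

#include <cstdio>
#include <stdexcept>
#include <string>

// Illustrative stand-ins: a sentinel playing the role of the hole value and
// an exception playing the role of Runtime::kThrowReferenceError.
struct Value { const char* payload; };
static Value TheHole = { nullptr };

struct ReferenceError : std::runtime_error {
  explicit ReferenceError(const std::string& name)
      : std::runtime_error(name + " is not initialized") {}
};

struct LetSlot {
  Value* stored = &TheHole;   // declaration: the slot starts out holding the hole

  Value* Load(const std::string& name) const {
    if (stored == &TheHole) throw ReferenceError(name);  // read before init
    return stored;
  }
  void Assign(const std::string& name, Value* v) {
    if (stored == &TheHole) throw ReferenceError(name);  // assign before init
    stored = v;
  }
  void Initialize(Value* v) { stored = v; }  // INIT_LET: no hole check
};

int main() {
  LetSlot x;
  try {
    x.Load("x");                       // throws: read before initialization
  } catch (const ReferenceError& e) {
    std::printf("caught: %s\n", e.what());
  }
  static Value ten = { "10" };
  x.Initialize(&ten);                  // let x = 10;
  std::printf("x = %s\n", x.Load("x")->payload);
  return 0;
}
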
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 6038153..6bad5ac 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -337,7 +337,7 @@
   // Fast case: Do the load.
   __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   // The key is a smi.
-  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   __ ldr(scratch2,
          MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
   __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -370,7 +370,7 @@
   // Is the string a symbol?
   // map: key map
   __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
-  ASSERT(kSymbolTag != 0);
+  STATIC_ASSERT(kSymbolTag != 0);
   __ tst(hash, Operand(kIsSymbolMask));
   __ b(eq, not_symbol);
 }
@@ -1333,7 +1333,7 @@
   __ cmp(key, Operand(ip));
   __ b(hs, &slow);
   // Calculate key + 1 as smi.
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   __ add(r4, key, Operand(Smi::FromInt(1)));
   __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
   __ b(&fast);
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 65a6169..976576b 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -3473,8 +3473,6 @@
 
   // Dispatch on the indirect string shape: slice or cons.
   Label cons_string;
-  const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;
-  ASSERT(IsPowerOf2(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
   __ tst(result, Operand(kSlicedNotConsMask));
   __ b(eq, &cons_string);
 
@@ -3759,7 +3757,7 @@
   LOperand* input = instr->InputAt(0);
   ASSERT(input->IsRegister() && input->Equals(instr->result()));
   if (instr->needs_check()) {
-    ASSERT(kHeapObjectTag == 1);
+    STATIC_ASSERT(kHeapObjectTag == 1);
     // If the input is a HeapObject, SmiUntag will set the carry flag.
     __ SmiUntag(ToRegister(input), SetCC);
     DeoptimizeIf(cs, instr->environment());
@@ -3844,7 +3842,7 @@
   // The input was optimistically untagged; revert it.
   // The carry flag is set when we reach this deferred code as we just executed
   // SmiUntag(heap_object, SetCC)
-  ASSERT(kHeapObjectTag == 1);
+  STATIC_ASSERT(kHeapObjectTag == 1);
   __ adc(input_reg, input_reg, Operand(input_reg));
 
   // Heap number map check.
diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc
index 81645c7..cd76edb 100644
--- a/src/arm/regexp-macro-assembler-arm.cc
+++ b/src/arm/regexp-macro-assembler-arm.cc
@@ -1049,7 +1049,7 @@
   MaybeObject* result = Execution::HandleStackGuardInterrupt();
 
   if (*code_handle != re_code) {  // Return address no longer valid
-    int delta = *code_handle - re_code;
+    int delta = code_handle->address() - re_code->address();
     // Overwrite the return address on the stack.
     *return_address += delta;
   }
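
Note: the fix above (mirrored for ia32 and mips below) corrects how the RegExp stack-guard handler rebases the saved return address after a GC may have moved the Code object: the old expression did pointer arithmetic on Code* values, while the quantity needed is the byte distance between the object's new and old addresses, which is then added to the return address. Below is a small sketch of that relocation arithmetic with plain byte pointers; the buffer and offsets are invented for illustration.

#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  // One flat buffer standing in for the heap; the "code object" starts at
  // old_base and the collector moves it to new_base.
  uint8_t heap[128] = {0};
  uint8_t* old_base = heap;
  uint8_t* new_base = heap + 64;

  // The saved return address pointed somewhere inside the old copy.
  uint8_t* return_address = old_base + 24;

  if (new_base != old_base) {  // object moved: return address no longer valid
    // Byte delta between the new and old addresses, the analogue of
    // code_handle->address() - re_code->address().
    std::ptrdiff_t delta = new_base - old_base;
    return_address += delta;   // rebase onto the relocated copy
  }

  std::printf("offset into the code object is still %td\n",
              return_address - new_base);
  return 0;
}
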
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 16e6468..5345892 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -3497,7 +3497,7 @@
 
   // We are not untagging smi key and instead work with it
   // as if it was premultiplied by 2.
-  ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
+  STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
 
   Register value = r2;
   switch (elements_kind) {
@@ -4147,7 +4147,7 @@
 
   // Load the result and make sure it's not the hole.
   __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   __ ldr(r4,
          MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
   __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -4279,7 +4279,7 @@
 
   __ add(scratch,
          elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
   __ str(value_reg,
          MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
   __ RecordWrite(scratch,
diff --git a/src/array.js b/src/array.js
index a12fdc8..281c507 100644
--- a/src/array.js
+++ b/src/array.js
@@ -996,6 +996,9 @@
   if (!IS_FUNCTION(f)) {
     throw MakeTypeError('called_non_callable', [ f ]);
   }
+  if (IS_NULL_OR_UNDEFINED(receiver)) {
+    receiver = %GetDefaultReceiver(f) || receiver;
+  }
   // Pull out the length so that modifications to the length in the
   // loop will not affect the looping.
   var length = ToUint32(this.length);
@@ -1004,7 +1007,7 @@
   for (var i = 0; i < length; i++) {
     var current = this[i];
     if (!IS_UNDEFINED(current) || i in this) {
-      if (f.call(receiver, current, i, this)) {
+      if (%_CallFunction(receiver, current, i, this, f)) {
         result[result_length++] = current;
       }
     }
@@ -1022,13 +1025,16 @@
   if (!IS_FUNCTION(f)) {
     throw MakeTypeError('called_non_callable', [ f ]);
   }
+  if (IS_NULL_OR_UNDEFINED(receiver)) {
+    receiver = %GetDefaultReceiver(f) || receiver;
+  }
   // Pull out the length so that modifications to the length in the
   // loop will not affect the looping.
   var length =  TO_UINT32(this.length);
   for (var i = 0; i < length; i++) {
     var current = this[i];
     if (!IS_UNDEFINED(current) || i in this) {
-      f.call(receiver, current, i, this);
+      %_CallFunction(receiver, current, i, this, f);
     }
   }
 }
@@ -1045,13 +1051,16 @@
   if (!IS_FUNCTION(f)) {
     throw MakeTypeError('called_non_callable', [ f ]);
   }
+  if (IS_NULL_OR_UNDEFINED(receiver)) {
+    receiver = %GetDefaultReceiver(f) || receiver;
+  }
   // Pull out the length so that modifications to the length in the
   // loop will not affect the looping.
   var length = TO_UINT32(this.length);
   for (var i = 0; i < length; i++) {
     var current = this[i];
     if (!IS_UNDEFINED(current) || i in this) {
-      if (f.call(receiver, current, i, this)) return true;
+      if (%_CallFunction(receiver, current, i, this, f)) return true;
     }
   }
   return false;
@@ -1067,13 +1076,16 @@
   if (!IS_FUNCTION(f)) {
     throw MakeTypeError('called_non_callable', [ f ]);
   }
+  if (IS_NULL_OR_UNDEFINED(receiver)) {
+    receiver = %GetDefaultReceiver(f) || receiver;
+  }
   // Pull out the length so that modifications to the length in the
   // loop will not affect the looping.
   var length = TO_UINT32(this.length);
   for (var i = 0; i < length; i++) {
     var current = this[i];
     if (!IS_UNDEFINED(current) || i in this) {
-      if (!f.call(receiver, current, i, this)) return false;
+      if (!%_CallFunction(receiver, current, i, this, f)) return false;
     }
   }
   return true;
@@ -1088,6 +1100,9 @@
   if (!IS_FUNCTION(f)) {
     throw MakeTypeError('called_non_callable', [ f ]);
   }
+  if (IS_NULL_OR_UNDEFINED(receiver)) {
+    receiver = %GetDefaultReceiver(f) || receiver;
+  }
   // Pull out the length so that modifications to the length in the
   // loop will not affect the looping.
   var length = TO_UINT32(this.length);
@@ -1096,7 +1111,7 @@
   for (var i = 0; i < length; i++) {
     var current = this[i];
     if (!IS_UNDEFINED(current) || i in this) {
-      accumulator[i] = f.call(receiver, current, i, this);
+      accumulator[i] = %_CallFunction(receiver, current, i, this, f);
     }
   }
   %MoveArrayContents(accumulator, result);
@@ -1233,6 +1248,7 @@
   if (!IS_FUNCTION(callback)) {
     throw MakeTypeError('called_non_callable', [callback]);
   }
+
   // Pull out the length so that modifications to the length in the
   // loop will not affect the looping.
   var length = ToUint32(this.length);
@@ -1249,10 +1265,11 @@
     throw MakeTypeError('reduce_no_initial', []);
   }
 
+  var receiver = %GetDefaultReceiver(callback);
   for (; i < length; i++) {
     var element = this[i];
     if (!IS_UNDEFINED(element) || i in this) {
-      current = callback.call(void 0, current, element, i, this);
+      current = %_CallFunction(receiver, current, element, i, this, callback);
     }
   }
   return current;
@@ -1280,10 +1297,11 @@
     throw MakeTypeError('reduce_no_initial', []);
   }
 
+  var receiver = %GetDefaultReceiver(callback);
   for (; i >= 0; i--) {
     var element = this[i];
     if (!IS_UNDEFINED(element) || i in this) {
-      current = callback.call(void 0, current, element, i, this);
+      current = %_CallFunction(receiver, current, element, i, this, callback);
     }
   }
   return current;
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 4f7cf40..9f01664 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -1160,7 +1160,7 @@
 
 
   {
-    // Setup the call-as-function delegate.
+    // Set up the call-as-function delegate.
     Handle<Code> code =
         Handle<Code>(isolate->builtins()->builtin(
             Builtins::kHandleApiCallAsFunction));
@@ -1172,7 +1172,7 @@
   }
 
   {
-    // Setup the call-as-constructor delegate.
+    // Set up the call-as-constructor delegate.
     Handle<Code> code =
         Handle<Code>(isolate->builtins()->builtin(
             Builtins::kHandleApiCallAsConstructor));
@@ -1192,15 +1192,15 @@
 
 
 void Genesis::InitializeExperimentalGlobal() {
-  Isolate* isolate = this->isolate();
   Handle<JSObject> global = Handle<JSObject>(global_context()->global());
 
   // TODO(mstarzinger): Move this into Genesis::InitializeGlobal once we no
   // longer need to live behind a flag, so WeakMap gets added to the snapshot.
   if (FLAG_harmony_weakmaps) {  // -- W e a k M a p
+    Handle<JSObject> prototype =
+        factory()->NewJSObject(isolate()->object_function(), TENURED);
     InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
-                    isolate->initial_object_prototype(),
-                    Builtins::kIllegal, true);
+                    prototype, Builtins::kIllegal, true);
   }
 }
 
diff --git a/src/checks.h b/src/checks.h
index a560b2f..2f359f6 100644
--- a/src/checks.h
+++ b/src/checks.h
@@ -251,9 +251,9 @@
 // actually causes each use to introduce a new defined type with a
 // name depending on the source line.
 template <int> class StaticAssertionHelper { };
-#define STATIC_CHECK(test)                                                  \
-  typedef                                                                   \
-    StaticAssertionHelper<sizeof(StaticAssertion<static_cast<bool>(test)>)> \
+#define STATIC_CHECK(test)                                                    \
+  typedef                                                                     \
+    StaticAssertionHelper<sizeof(StaticAssertion<static_cast<bool>((test))>)> \
     SEMI_STATIC_JOIN(__StaticAssertTypedef__, __LINE__)
 
 
diff --git a/src/contexts.cc b/src/contexts.cc
index c0e7242..4f93abd 100644
--- a/src/contexts.cc
+++ b/src/contexts.cc
@@ -87,13 +87,15 @@
 Handle<Object> Context::Lookup(Handle<String> name,
                                ContextLookupFlags flags,
                                int* index_,
-                               PropertyAttributes* attributes) {
+                               PropertyAttributes* attributes,
+                               BindingFlags* binding_flags) {
   Isolate* isolate = GetIsolate();
   Handle<Context> context(this, isolate);
 
   bool follow_context_chain = (flags & FOLLOW_CONTEXT_CHAIN) != 0;
   *index_ = -1;
   *attributes = ABSENT;
+  *binding_flags = MISSING_BINDING;
 
   if (FLAG_trace_contexts) {
     PrintF("Context::Lookup(");
@@ -118,6 +120,7 @@
           }
           *index_ = Context::THROWN_OBJECT_INDEX;
           *attributes = NONE;
+          *binding_flags = MUTABLE_IS_INITIALIZED;
           return context;
         }
       } else {
@@ -180,11 +183,16 @@
         switch (mode) {
           case Variable::INTERNAL:  // Fall through.
           case Variable::VAR:
+            *attributes = NONE;
+            *binding_flags = MUTABLE_IS_INITIALIZED;
+            break;
           case Variable::LET:
             *attributes = NONE;
+            *binding_flags = MUTABLE_CHECK_INITIALIZED;
             break;
           case Variable::CONST:
             *attributes = READ_ONLY;
+            *binding_flags = IMMUTABLE_CHECK_INITIALIZED;
             break;
           case Variable::DYNAMIC:
           case Variable::DYNAMIC_GLOBAL:
@@ -207,6 +215,7 @@
           }
           *index_ = index;
           *attributes = READ_ONLY;
+          *binding_flags = IMMUTABLE_IS_INITIALIZED;
           return context;
         }
       }
diff --git a/src/contexts.h b/src/contexts.h
index 3d9e7f4..505f86c 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -44,6 +44,30 @@
 };
 
 
+// ES5 10.2 defines lexical environments with mutable and immutable bindings.
+// Immutable bindings have two states, initialized and uninitialized, and
+// their state is changed by the InitializeImmutableBinding method.
+//
+// The harmony proposal for block scoped bindings also introduces the
+// uninitialized state for mutable bindings. A 'let' declared variable
+// is a mutable binding that is created uninitialized upon activation of its
+// lexical environment and it is initialized when evaluating its declaration
+// statement. Var declared variables are mutable bindings that are
+// immediately initialized upon creation. The BindingFlags enum indicates
+// whether a binding has definitely been initialized. 'const' declared
+// variables are created as uninitialized immutable bindings.
+
+// In harmony mode accessing an uninitialized binding produces a reference
+// error.
+enum BindingFlags {
+  MUTABLE_IS_INITIALIZED,
+  MUTABLE_CHECK_INITIALIZED,
+  IMMUTABLE_IS_INITIALIZED,
+  IMMUTABLE_CHECK_INITIALIZED,
+  MISSING_BINDING
+};
+
+
 // Heap-allocated activation contexts.
 //
 // Contexts are implemented as FixedArray objects; the Context
@@ -351,8 +375,11 @@
   // 4) index_ < 0 && result.is_null():
   //    there was no context found with the corresponding property.
   //    attributes == ABSENT.
-  Handle<Object> Lookup(Handle<String> name, ContextLookupFlags flags,
-                        int* index_, PropertyAttributes* attributes);
+  Handle<Object> Lookup(Handle<String> name,
+                        ContextLookupFlags flags,
+                        int* index_,
+                        PropertyAttributes* attributes,
+                        BindingFlags* binding_flags);
 
   // Determine if a local variable with the given name exists in a
   // context.  Do not consider context extension objects.  This is
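
Note: with the extra out-parameter, Context::Lookup now tells its callers not only where a binding lives and whether it is read-only, but also whether a hole check is needed before use: VAR and INTERNAL bindings (and the catch-bound variable) come back as MUTABLE_IS_INITIALIZED, LET as MUTABLE_CHECK_INITIALIZED, CONST as IMMUTABLE_CHECK_INITIALIZED, and a failed lookup as MISSING_BINDING. Below is a hedged sketch of how a caller might branch on that information; the enum mirrors the header above, while the lookup table and result struct are toy stand-ins for the real Context API.

#include <cstdio>
#include <map>
#include <string>

// The enum mirrors the BindingFlags added to contexts.h; everything else is
// a toy stand-in for Context::Lookup and its out-parameters.
enum BindingFlags {
  MUTABLE_IS_INITIALIZED,      // var bindings: always safe to read
  MUTABLE_CHECK_INITIALIZED,   // let bindings: reads must check for the hole
  IMMUTABLE_IS_INITIALIZED,    // initialized const-like bindings
  IMMUTABLE_CHECK_INITIALIZED, // const bindings before initialization
  MISSING_BINDING
};

struct LookupResult {
  bool found;
  bool read_only;
  BindingFlags binding_flags;
};

LookupResult Lookup(const std::map<std::string, BindingFlags>& scope,
                    const std::string& name) {
  std::map<std::string, BindingFlags>::const_iterator it = scope.find(name);
  if (it == scope.end()) return LookupResult{false, false, MISSING_BINDING};
  bool read_only = it->second == IMMUTABLE_CHECK_INITIALIZED ||
                   it->second == IMMUTABLE_IS_INITIALIZED;
  return LookupResult{true, read_only, it->second};
}

int main() {
  std::map<std::string, BindingFlags> scope;
  scope["v"] = MUTABLE_IS_INITIALIZED;       // var v
  scope["l"] = MUTABLE_CHECK_INITIALIZED;    // let l (may still be the hole)
  scope["c"] = IMMUTABLE_CHECK_INITIALIZED;  // const c

  const char* names[] = {"v", "l", "c", "missing"};
  for (const char* n : names) {
    LookupResult r = Lookup(scope, n);
    if (!r.found) {
      std::printf("%s: MISSING_BINDING\n", n);
    } else if (r.binding_flags == MUTABLE_CHECK_INITIALIZED ||
               r.binding_flags == IMMUTABLE_CHECK_INITIALIZED) {
      std::printf("%s: emit hole check before load%s\n", n,
                  r.read_only ? " (read-only)" : "");
    } else {
      std::printf("%s: plain load%s\n", n, r.read_only ? " (read-only)" : "");
    }
  }
  return 0;
}
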
diff --git a/src/d8.cc b/src/d8.cc
index 120496e..025c772 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -678,9 +678,9 @@
                        FunctionTemplate::New(PixelArray));
 
 #ifdef LIVE_OBJECT_LIST
-  global_template->Set(String::New("lol_is_enabled"), Boolean::New(true));
+  global_template->Set(String::New("lol_is_enabled"), True());
 #else
-  global_template->Set(String::New("lol_is_enabled"), Boolean::New(false));
+  global_template->Set(String::New("lol_is_enabled"), False());
 #endif
 
 #ifndef V8_SHARED
diff --git a/src/elements.cc b/src/elements.cc
index 70d58b3..1afc5da 100644
--- a/src/elements.cc
+++ b/src/elements.cc
@@ -590,7 +590,6 @@
     default:
       UNREACHABLE();
       return NULL;
-      break;
   }
 }
 
diff --git a/src/heap.cc b/src/heap.cc
index 90d0e11..84909d5 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -4085,10 +4085,9 @@
                                         SerializedScopeInfo* scope_info) {
   Object* result;
   { MaybeObject* maybe_result =
-        AllocateFixedArray(scope_info->NumberOfContextSlots());
+        AllocateFixedArrayWithHoles(scope_info->NumberOfContextSlots());
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  // TODO(keuchel): properly initialize context slots.
   Context* context = reinterpret_cast<Context*>(result);
   context->set_map(block_context_map());
   context->set_closure(function);
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index dd3a591..55bf2dc 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -3122,6 +3122,8 @@
   Variable* variable = expr->AsVariable();
   if (variable == NULL) {
     return Bailout("reference to rewritten variable");
+  } else if (variable->mode() == Variable::LET) {
+    return Bailout("reference to let variable");
   } else if (variable->IsStackAllocated()) {
     HValue* value = environment()->Lookup(variable);
     if (variable->mode() == Variable::CONST &&
@@ -3587,8 +3589,9 @@
   BinaryOperation* operation = expr->binary_operation();
 
   if (var != NULL) {
-    if (var->mode() == Variable::CONST)  {
-      return Bailout("unsupported const compound assignment");
+    if (var->mode() == Variable::CONST ||
+        var->mode() == Variable::LET)  {
+      return Bailout("unsupported let or const compound assignment");
     }
 
     CHECK_ALIVE(VisitForValue(operation));
@@ -3731,6 +3734,8 @@
       // variables (e.g. initialization inside a loop).
       HValue* old_value = environment()->Lookup(var);
       AddInstruction(new HUseConst(old_value));
+    } else if (var->mode() == Variable::LET) {
+      return Bailout("unsupported assignment to let");
     }
 
     if (proxy->IsArguments()) return Bailout("assignment to arguments");
@@ -5804,7 +5809,9 @@
 void HGraphBuilder::VisitDeclaration(Declaration* decl) {
   // We support only declarations that do not require code generation.
   Variable* var = decl->proxy()->var();
-  if (!var->IsStackAllocated() || decl->fun() != NULL) {
+  if (!var->IsStackAllocated() ||
+      decl->fun() != NULL ||
+      decl->mode() == Variable::LET) {
     return Bailout("unsupported declaration");
   }
 
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index f8a85de..845a073 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -373,7 +373,7 @@
   __ LeaveConstructFrame();
 
   // Remove caller arguments from the stack and return.
-  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ pop(ecx);
   __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
   __ push(ecx);
@@ -923,7 +923,7 @@
   // Fill the FixedArray with the hole value. Inline the code if short.
   // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
   static const int kLoopUnfoldLimit = 4;
-  ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
+  STATIC_ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
   if (initial_capacity <= kLoopUnfoldLimit) {
     // Use a scratch register here to have only one reloc info when unfolding
     // the loop.
@@ -975,7 +975,7 @@
 
   // Allocate the JSArray object together with space for a FixedArray with the
   // requested elements.
-  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
                         times_half_pointer_size,  // array_size is a smi.
                         array_size,
@@ -1100,7 +1100,7 @@
   __ bind(&argc_one_or_more);
   __ cmp(eax, 1);
   __ j(not_equal, &argc_two_or_more);
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize));
   __ test(ecx, Operand(ecx));
   __ j(not_zero, &not_empty_array);
@@ -1155,7 +1155,7 @@
 
   // Handle construction of an array from a list of arguments.
   __ bind(&argc_two_or_more);
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ SmiTag(eax);  // Convert argc to a smi.
   // eax: array_size (smi)
   // edi: constructor
@@ -1437,7 +1437,7 @@
   // Preserve the number of arguments on the stack. Must preserve eax,
   // ebx and ecx because these registers are used when copying the
   // arguments and the receiver.
-  ASSERT(kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTagSize == 1);
   __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
   __ push(edi);
 }
@@ -1451,7 +1451,7 @@
   __ leave();
 
   // Remove caller arguments from the stack.
-  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
   __ pop(ecx);
   __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
   __ push(ecx);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index d76e4bf..85e74b8 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -3396,8 +3396,8 @@
   // a sequential string or an external string.
   // In the case of a sliced string its offset has to be taken into account.
   Label cons_string, check_encoding;
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ cmp(Operand(ebx), Immediate(kExternalStringTag));
   __ j(less, &cons_string);
   __ j(equal, &runtime);
@@ -4872,8 +4872,8 @@
 
   // Handle non-flat strings.
   __ and_(result_, kStringRepresentationMask);
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ cmp(result_, kExternalStringTag);
   __ j(greater, &sliced_string, Label::kNear);
   __ j(equal, &call_runtime_);
@@ -5642,9 +5642,6 @@
 void SubStringStub::Generate(MacroAssembler* masm) {
   Label runtime;
 
-  if (FLAG_string_slices) {
-    __ jmp(&runtime);
-  }
   // Stack frame on entry.
   //  esp[0]: return address
   //  esp[4]: to
@@ -5706,7 +5703,83 @@
   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   __ Set(ecx, Immediate(2));
 
-  __ bind(&result_longer_than_two);
+  if (FLAG_string_slices) {
+    Label copy_routine;
+    // If coming from the make_two_character_string path, the string
+    // is too short to be sliced anyway.
+    STATIC_ASSERT(2 < SlicedString::kMinLength);
+    __ jmp(&copy_routine);
+    __ bind(&result_longer_than_two);
+
+    // eax: string
+    // ebx: instance type
+    // ecx: sub string length
+    // edx: from index (smi)
+    Label allocate_slice, sliced_string, seq_string;
+    __ cmp(ecx, SlicedString::kMinLength);
+    // Short slice.  Copy instead of slicing.
+    __ j(less, &copy_routine);
+    STATIC_ASSERT(kSeqStringTag == 0);
+    __ test(ebx, Immediate(kStringRepresentationMask));
+    __ j(zero, &seq_string, Label::kNear);
+    STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+    STATIC_ASSERT(kIsIndirectStringMask != 0);
+    __ test(ebx, Immediate(kIsIndirectStringMask));
+    // External string.  Jump to runtime.
+    __ j(zero, &runtime);
+
+    Factory* factory = masm->isolate()->factory();
+    __ test(ebx, Immediate(kSlicedNotConsMask));
+    __ j(not_zero, &sliced_string, Label::kNear);
+    // Cons string.  Check whether it is flat, then fetch first part.
+    __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
+           factory->empty_string());
+    __ j(not_equal, &runtime);
+    __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
+    __ jmp(&allocate_slice, Label::kNear);
+
+    __ bind(&sliced_string);
+    // Sliced string.  Fetch parent and correct start index by offset.
+    __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
+    __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
+    __ jmp(&allocate_slice, Label::kNear);
+
+    __ bind(&seq_string);
+    // Sequential string.  Just move string to the right register.
+    __ mov(edi, eax);
+
+    __ bind(&allocate_slice);
+    // edi: underlying subject string
+    // ebx: instance type of original subject string
+    // edx: offset
+    // ecx: length
+    // Allocate new sliced string.  At this point we do not reload the instance
+    // type including the string encoding because we simply rely on the info
+    // provided by the original string.  It does not matter if the original
+    // string's encoding is wrong because we always have to recheck encoding of
+    // the newly created string's parent anyway due to externalized strings.
+    Label two_byte_slice, set_slice_header;
+    STATIC_ASSERT(kAsciiStringTag != 0);
+    __ test(ebx, Immediate(kAsciiStringTag));
+    __ j(zero, &two_byte_slice, Label::kNear);
+    __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
+    __ jmp(&set_slice_header, Label::kNear);
+    __ bind(&two_byte_slice);
+    __ AllocateSlicedString(eax, ebx, no_reg, &runtime);
+    __ bind(&set_slice_header);
+    __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
+    __ SmiTag(ecx);
+    __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
+    __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
+    __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
+           Immediate(String::kEmptyHashField));
+    __ jmp(&return_eax);
+
+    __ bind(&copy_routine);
+  } else {
+    __ bind(&result_longer_than_two);
+  }
+
   // eax: string
   // ebx: instance type
   // ecx: result string length
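
Note: the new FLAG_string_slices block replaces the earlier jump to the runtime that was taken whenever string slices were enabled: once the requested substring is at least SlicedString::kMinLength characters, the stub resolves the underlying sequential, cons or sliced parent and allocates a SlicedString recording parent, offset and length instead of copying characters; shorter results still take the copy path. Below is a toy sketch of that representation choice; the string type, threshold value and struct layout are illustrative only.

#include <cstddef>
#include <cstdio>
#include <string>

// Below kMinLength we copy, at or above it we record (parent, offset, length).
// The threshold plays the role of SlicedString::kMinLength; value illustrative.
const std::size_t kMinLength = 13;

struct ToySlice {
  const std::string* parent = nullptr;  // underlying flat string
  std::size_t offset = 0;
  std::size_t length = 0;
};

struct SubStringResult {
  bool is_slice = false;
  std::string copy;   // used when is_slice is false
  ToySlice slice;     // used when is_slice is true
};

SubStringResult SubString(const std::string& subject,
                          std::size_t from, std::size_t to) {
  SubStringResult result;
  std::size_t length = to - from;
  if (length < kMinLength) {
    // Short result: copying is cheaper than carrying a slice header around.
    result.copy = subject.substr(from, length);
  } else {
    // Long result: no character copying, just a header pointing at the parent.
    result.is_slice = true;
    result.slice.parent = &subject;
    result.slice.offset = from;
    result.slice.length = length;
  }
  return result;
}

int main() {
  std::string s = "the quick brown fox jumps over the lazy dog";
  SubStringResult short_cut = SubString(s, 4, 9);   // "quick": copied
  SubStringResult sliced = SubString(s, 4, 25);     // long enough: sliced
  std::printf("short: copy=\"%s\"\n", short_cut.copy.c_str());
  std::printf("long: slice offset=%zu length=%zu of parent\n",
              sliced.slice.offset, sliced.slice.length);
  return 0;
}
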
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 799ba73..52e33d6 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -693,12 +693,12 @@
   switch (slot->type()) {
     case Slot::PARAMETER:
     case Slot::LOCAL:
-      if (mode == Variable::CONST) {
-        __ mov(Operand(ebp, SlotOffset(slot)),
-               Immediate(isolate()->factory()->the_hole_value()));
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForAccumulatorValue(function);
         __ mov(Operand(ebp, SlotOffset(slot)), result_register());
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ mov(Operand(ebp, SlotOffset(slot)),
+               Immediate(isolate()->factory()->the_hole_value()));
       }
       break;
 
@@ -717,16 +717,16 @@
         __ cmp(ebx, isolate()->factory()->catch_context_map());
         __ Check(not_equal, "Declaration in catch context.");
       }
-      if (mode == Variable::CONST) {
-        __ mov(ContextOperand(esi, slot->index()),
-               Immediate(isolate()->factory()->the_hole_value()));
-        // No write barrier since the hole value is in old space.
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForAccumulatorValue(function);
         __ mov(ContextOperand(esi, slot->index()), result_register());
         int offset = Context::SlotOffset(slot->index());
         __ mov(ebx, esi);
         __ RecordWrite(ebx, offset, result_register(), ecx);
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ mov(ContextOperand(esi, slot->index()),
+               Immediate(isolate()->factory()->the_hole_value()));
+        // No write barrier since the hole value is in old space.
       }
       break;
 
@@ -744,11 +744,11 @@
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
       increment_stack_height(3);
-      if (mode == Variable::CONST) {
+      if (function != NULL) {
+        VisitForStackValue(function);
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
         __ push(Immediate(isolate()->factory()->the_hole_value()));
         increment_stack_height();
-      } else if (function != NULL) {
-        VisitForStackValue(function);
       } else {
         __ push(Immediate(Smi::FromInt(0)));  // No initial value!
         increment_stack_height();
@@ -1268,6 +1268,18 @@
       __ mov(eax, isolate()->factory()->undefined_value());
       __ bind(&done);
       context()->Plug(eax);
+    } else if (var->mode() == Variable::LET) {
+      // Let bindings may be the hole value if they have not been initialized.
+      // Throw a reference error in this case.
+      Label done;
+      MemOperand slot_operand = EmitSlotSearch(slot, eax);
+      __ mov(eax, slot_operand);
+      __ cmp(eax, isolate()->factory()->the_hole_value());
+      __ j(not_equal, &done, Label::kNear);
+      __ push(Immediate(var->name()));
+      __ CallRuntime(Runtime::kThrowReferenceError, 1);
+      __ bind(&done);
+      context()->Plug(eax);
     } else {
       context()->Plug(slot);
     }
@@ -1855,6 +1867,57 @@
     }
     __ bind(&skip);
 
+  } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+    // Perform the assignment for let variables, checking first that the
+    // binding has been initialized (otherwise a reference error is thrown).
+    Slot* slot = var->AsSlot();
+    switch (slot->type()) {
+      case Slot::PARAMETER:
+      case Slot::LOCAL: {
+        Label assign;
+        // Check for an initialized let binding.
+        __ mov(edx, Operand(ebp, SlotOffset(slot)));
+        __ cmp(edx, isolate()->factory()->the_hole_value());
+        __ j(not_equal, &assign);
+        __ push(Immediate(var->name()));
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+        // Perform the assignment.
+        __ bind(&assign);
+        __ mov(Operand(ebp, SlotOffset(slot)), eax);
+        break;
+      }
+
+      case Slot::CONTEXT: {
+        // Let variables may be the hole value if they have not been
+        // initialized. Throw a reference error in this case.
+        Label assign;
+        MemOperand target = EmitSlotSearch(slot, ecx);
+        // Check for an initialized let binding.
+        __ mov(edx, target);
+        __ cmp(edx, isolate()->factory()->the_hole_value());
+        __ j(not_equal, &assign, Label::kNear);
+        __ push(Immediate(var->name()));
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+        // Perform the assignment.
+        __ bind(&assign);
+        __ mov(target, eax);
+        // The value of the assignment is in eax. RecordWrite clobbers its
+        // register arguments.
+        __ mov(edx, eax);
+        int offset = Context::SlotOffset(slot->index());
+        __ RecordWrite(ecx, offset, edx, ebx);
+        break;
+      }
+
+      case Slot::LOOKUP:
+        // Call the runtime for the assignment.
+        __ push(eax);  // Value.
+        __ push(esi);  // Context.
+        __ push(Immediate(var->name()));
+        __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+        __ CallRuntime(Runtime::kStoreContextSlot, 4);
+        break;
+    }
   } else if (var->mode() != Variable::CONST) {
     // Perform the assignment for non-const variables.  Const assignments
     // are simply skipped.
@@ -3199,7 +3262,7 @@
 
   Label done, not_found;
   // tmp now holds finger offset as a smi.
-  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
   __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
   __ j(not_equal, &not_found);
@@ -4242,8 +4305,8 @@
   ASSERT(!result_register().is(edx));
   __ pop(edx);
   __ sub(Operand(edx), Immediate(masm_->CodeObject()));
-  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
+  STATIC_ASSERT(kSmiTag == 0);
   __ SmiTag(edx);
   __ push(edx);
   // Store result register while executing finally block.
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 5f143b1..7d3ead2 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -324,7 +324,7 @@
   __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
   __ j(above_equal, out_of_range);
   // Fast case: Do the load.
-  ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
+  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
   __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
   __ cmp(Operand(scratch), Immediate(FACTORY->the_hole_value()));
   // In case the loaded value is the_hole we have to consult GetProperty
@@ -358,7 +358,7 @@
   __ j(zero, index_string);
 
   // Is the string a symbol?
-  ASSERT(kSymbolTag != 0);
+  STATIC_ASSERT(kSymbolTag != 0);
   __ test_b(FieldOperand(map, Map::kInstanceTypeOffset), kIsSymbolMask);
   __ j(zero, not_symbol);
 }
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 5f67038..32e3074 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -3234,8 +3234,6 @@
 
   // Dispatch on the indirect string shape: slice or cons.
   Label cons_string;
-  const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;
-  ASSERT(IsPowerOf2(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
   __ test(result, Immediate(kSlicedNotConsMask));
   __ j(zero, &cons_string, Label::kNear);
 
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 9df5cad..dff174c 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -148,7 +148,7 @@
   Label done;
 
   // Skip barrier if writing a smi.
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   JumpIfSmi(value, &done, Label::kNear);
 
   InNewSpace(object, value, equal, &done, Label::kNear);
@@ -166,8 +166,8 @@
     // Array access: calculate the destination address in the same manner as
     // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
     // into an array of words.
-    ASSERT_EQ(1, kSmiTagSize);
-    ASSERT_EQ(0, kSmiTag);
+    STATIC_ASSERT(kSmiTagSize == 1);
+    STATIC_ASSERT(kSmiTag == 0);
     lea(dst, Operand(object, dst, times_half_pointer_size,
                      FixedArray::kHeaderSize - kHeapObjectTag));
   }
@@ -193,7 +193,7 @@
   Label done;
 
   // Skip barrier if writing a smi.
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   JumpIfSmi(value, &done, Label::kNear);
 
   InNewSpace(object, value, equal, &done);
@@ -326,7 +326,7 @@
                                              Register instance_type) {
   mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
   movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
-  ASSERT(kNotStringTag != 0);
+  STATIC_ASSERT(kNotStringTag != 0);
   test(instance_type, Immediate(kIsNotStringMask));
   return zero;
 }
@@ -1208,6 +1208,42 @@
 }
 
 
+void MacroAssembler::AllocateSlicedString(Register result,
+                                          Register scratch1,
+                                          Register scratch2,
+                                          Label* gc_required) {
+  // Allocate a sliced string object in new space.
+  AllocateInNewSpace(SlicedString::kSize,
+                     result,
+                     scratch1,
+                     scratch2,
+                     gc_required,
+                     TAG_OBJECT);
+
+  // Set the map. The other fields are left uninitialized.
+  mov(FieldOperand(result, HeapObject::kMapOffset),
+      Immediate(isolate()->factory()->sliced_string_map()));
+}
+
+
+void MacroAssembler::AllocateAsciiSlicedString(Register result,
+                                               Register scratch1,
+                                               Register scratch2,
+                                               Label* gc_required) {
+  // Allocate an ASCII sliced string object in new space.
+  AllocateInNewSpace(SlicedString::kSize,
+                     result,
+                     scratch1,
+                     scratch2,
+                     gc_required,
+                     TAG_OBJECT);
+
+  // Set the map. The other fields are left uninitialized.
+  mov(FieldOperand(result, HeapObject::kMapOffset),
+      Immediate(isolate()->factory()->sliced_ascii_string_map()));
+}
+
+
 // Copy memory, byte-by-byte, from source to destination.  Not optimized for
 // long or aligned copies.  The contents of scratch and length are destroyed.
 // Source and destination are incremented by length.
@@ -2166,7 +2202,7 @@
                                                          Register scratch2,
                                                          Label* failure) {
   // Check that both objects are not smis.
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   mov(scratch1, Operand(object1));
   and_(scratch1, Operand(object2));
   JumpIfSmi(scratch1, failure);
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index d79df5e..de9361d 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -275,8 +275,8 @@
 
   // Smi tagging support.
   void SmiTag(Register reg) {
-    ASSERT(kSmiTag == 0);
-    ASSERT(kSmiTagSize == 1);
+    STATIC_ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTagSize == 1);
     add(reg, Operand(reg));
   }
   void SmiUntag(Register reg) {
@@ -285,9 +285,9 @@
 
   // Modifies the register even if it does not contain a Smi!
   void SmiUntag(Register reg, Label* is_smi) {
-    ASSERT(kSmiTagSize == 1);
+    STATIC_ASSERT(kSmiTagSize == 1);
     sar(reg, kSmiTagSize);
-    ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTag == 0);
     j(not_carry, is_smi);
   }
 
@@ -446,6 +446,17 @@
                                Register scratch2,
                                Label* gc_required);
 
+  // Allocate a raw sliced string object. Only the map field of the result is
+  // initialized.
+  void AllocateSlicedString(Register result,
+                            Register scratch1,
+                            Register scratch2,
+                            Label* gc_required);
+  void AllocateAsciiSlicedString(Register result,
+                                 Register scratch1,
+                                 Register scratch2,
+                                 Label* gc_required);
+
   // Copy memory, byte-by-byte, from source to destination.  Not optimized for
   // long or aligned copies.
   // The contents of index and scratch are destroyed.
diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc
index 7d7de0e..d175d9e 100644
--- a/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/ia32/regexp-macro-assembler-ia32.cc
@@ -1080,7 +1080,7 @@
   MaybeObject* result = Execution::HandleStackGuardInterrupt();
 
   if (*code_handle != re_code) {  // Return address no longer valid
-    int delta = *code_handle - re_code;
+    int delta = code_handle->address() - re_code->address();
     // Overwrite the return address on the stack.
     *return_address += delta;
   }
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 4a37b07..621a9bb 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -273,7 +273,7 @@
   // Check that the object is a string.
   __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
-  ASSERT(kNotStringTag != 0);
+  STATIC_ASSERT(kNotStringTag != 0);
   __ test(scratch, Immediate(kNotStringTag));
   __ j(not_zero, non_string_object);
 }
diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc
index 51642e0..28ac557 100644
--- a/src/mips/assembler-mips.cc
+++ b/src/mips/assembler-mips.cc
@@ -780,10 +780,10 @@
 void Assembler::next(Label* L) {
   ASSERT(L->is_linked());
   int link = target_at(L->pos());
-  ASSERT(link > 0 || link == kEndOfChain);
   if (link == kEndOfChain) {
     L->Unuse();
-  } else if (link > 0) {
+  } else {
+    ASSERT(link >= 0);
     L->link_to(link);
   }
 }
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 2526a6a..9385f2f 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -4527,9 +4527,9 @@
   __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
   __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
   // First check for flat string.
-  __ And(at, a0, Operand(kIsNotStringMask | kStringRepresentationMask));
+  __ And(a1, a0, Operand(kIsNotStringMask | kStringRepresentationMask));
   STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
-  __ Branch(&seq_string, eq, at, Operand(zero_reg));
+  __ Branch(&seq_string, eq, a1, Operand(zero_reg));
 
   // subject: Subject string
   // a0: instance type of Subject string
@@ -4541,10 +4541,10 @@
   // a sequential string or an external string.
   // In the case of a sliced string its offset has to be taken into account.
   Label cons_string, check_encoding;
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
-  __ Branch(&cons_string, lt, at, Operand(kExternalStringTag));
-  __ Branch(&runtime, eq, at, Operand(kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+  __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag));
+  __ Branch(&runtime, eq, a1, Operand(kExternalStringTag));
 
   // String is sliced.
   __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
@@ -5025,8 +5025,8 @@
 
   // Handle non-flat strings.
   __ And(result_, result_, Operand(kStringRepresentationMask));
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ Branch(&sliced_string, gt, result_, Operand(kExternalStringTag));
   __ Branch(&call_runtime_, eq, result_, Operand(kExternalStringTag));
 
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index cf48ccf..3c0c316 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1500,9 +1500,7 @@
 
     // Update the write barrier for the array store with v0 as the scratch
     // register.
-    __ li(a2, Operand(offset));
-    // TODO(PJ): double check this RecordWrite call.
-    __ RecordWrite(a1, a2, result_register());
+    __ RecordWrite(a1, Operand(offset), a2, result_register());
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 8e4b8ef..c7f727b 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -1557,12 +1557,14 @@
         b(offset);
         break;
       case eq:
+        ASSERT(!scratch.is(rs));
         r2 = scratch;
         li(r2, rt);
         offset = shifted_branch_offset(L, false);
         beq(rs, r2, offset);
         break;
       case ne:
+        ASSERT(!scratch.is(rs));
         r2 = scratch;
         li(r2, rt);
         offset = shifted_branch_offset(L, false);
@@ -1574,6 +1576,7 @@
           offset = shifted_branch_offset(L, false);
           bgtz(rs, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           slt(scratch, r2, rs);
@@ -1590,6 +1593,7 @@
           offset = shifted_branch_offset(L, false);
           beq(scratch, zero_reg, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           slt(scratch, rs, r2);
@@ -1606,6 +1610,7 @@
           offset = shifted_branch_offset(L, false);
           bne(scratch, zero_reg, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           slt(scratch, rs, r2);
@@ -1618,6 +1623,7 @@
           offset = shifted_branch_offset(L, false);
           blez(rs, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           slt(scratch, r2, rs);
@@ -1631,6 +1637,7 @@
           offset = shifted_branch_offset(L, false);
           bgtz(rs, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           sltu(scratch, r2, rs);
@@ -1647,6 +1654,7 @@
           offset = shifted_branch_offset(L, false);
           beq(scratch, zero_reg, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           sltu(scratch, rs, r2);
@@ -1663,6 +1671,7 @@
           offset = shifted_branch_offset(L, false);
           bne(scratch, zero_reg, offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           sltu(scratch, rs, r2);
@@ -1675,6 +1684,7 @@
           offset = shifted_branch_offset(L, false);
           b(offset);
         } else {
+          ASSERT(!scratch.is(rs));
           r2 = scratch;
           li(r2, rt);
           sltu(scratch, r2, rs);
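
Note: the new ASSERT(!scratch.is(rs)) checks guard the branch-macro paths that first materialize the immediate operand into the scratch register (r2 = scratch; li(r2, rt)) and then compare it against rs: if a caller ever passed the scratch register itself as rs, the li would clobber the value being compared before the branch sees it. Below is a tiny sketch of the hazard being asserted against, with array slots standing in for registers.

#include <cassert>
#include <cstdio>

// Registers modelled as slots in a register file; index 1 doubles as the
// scratch register, mirroring how the macro assembler reserves a scratch reg.
const int kScratch = 1;

// Emulates the macro: load the immediate into scratch, then compare rs with
// scratch. The assert is the new precondition from macro-assembler-mips.cc.
bool BranchIfEqual(int regs[], int rs, int immediate) {
  assert(rs != kScratch);      // scratch must not alias the compared register
  regs[kScratch] = immediate;  // li(scratch, rt)
  return regs[rs] == regs[kScratch];
}

int main() {
  int regs[4] = {0, 0, 42, 7};
  std::printf("r2 == 42: %s\n",
              BranchIfEqual(regs, 2, 42) ? "taken" : "not taken");
  // BranchIfEqual(regs, kScratch, 42) would have silently compared the
  // freshly loaded immediate with itself; the assert now rejects that.
  return 0;
}
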
diff --git a/src/mips/regexp-macro-assembler-mips.cc b/src/mips/regexp-macro-assembler-mips.cc
index 45d3963..63e836f 100644
--- a/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/mips/regexp-macro-assembler-mips.cc
@@ -1050,7 +1050,7 @@
   MaybeObject* result = Execution::HandleStackGuardInterrupt();
 
   if (*code_handle != re_code) {  // Return address no longer valid.
-    int delta = *code_handle - re_code;
+    int delta = code_handle->address() - re_code->address();
     // Overwrite the return address on the stack.
     *return_address += delta;
   }
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index 30e12e7..3b38695 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -1409,20 +1409,11 @@
     int32_t arg1 = get_register(a1);
     int32_t arg2 = get_register(a2);
     int32_t arg3 = get_register(a3);
-    int32_t arg4 = 0;
-    int32_t arg5 = 0;
 
-    // Need to check if sp is valid before assigning arg4, arg5.
-    // This is a fix for cctest test-api/CatchStackOverflow which causes
-    // the stack to overflow. For some reason arm doesn't need this
-    // stack check here.
     int32_t* stack_pointer = reinterpret_cast<int32_t*>(get_register(sp));
-    int32_t* stack = reinterpret_cast<int32_t*>(stack_);
-    if (stack_pointer >= stack && stack_pointer < stack + stack_size_ - 5) {
-      // Args 4 and 5 are on the stack after the reserved space for args 0..3.
-      arg4 = stack_pointer[4];
-      arg5 = stack_pointer[5];
-    }
+    // Args 4 and 5 are on the stack after the reserved space for args 0..3.
+    int32_t arg4 = stack_pointer[4];
+    int32_t arg5 = stack_pointer[5];
 
     bool fp_call =
          (redirection->type() == ExternalReference::BUILTIN_FP_FP_CALL) ||
diff --git a/src/objects-inl.h b/src/objects-inl.h
index b4c6a3a..3b0e96f 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -171,7 +171,7 @@
   // Because the symbol tag is non-zero and no non-string types have the
   // symbol bit set we can test for symbols with a very simple test
   // operation.
-  ASSERT(kSymbolTag != 0);
+  STATIC_ASSERT(kSymbolTag != 0);
   ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
   return (type & kIsSymbolMask) != 0;
 }
@@ -256,7 +256,7 @@
 
 bool StringShape::IsSymbol() {
   ASSERT(valid());
-  ASSERT(kSymbolTag != 0);
+  STATIC_ASSERT(kSymbolTag != 0);
   return (type_ & kIsSymbolMask) != 0;
 }
 
diff --git a/src/objects.h b/src/objects.h
index 6c8888b..53ba981 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -496,6 +496,11 @@
 STATIC_ASSERT(
     (kSlicedStringTag & kIsIndirectStringMask) == kIsIndirectStringTag);
 
+// Use this mask to distinguish between cons and slice only after making
+// sure that the string is one of the two (an indirect string).
+const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;
+STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
+
 // If bit 7 is clear, then bit 3 indicates whether this two-byte
 // string actually contains ascii data.
 const uint32_t kAsciiDataHintMask = 0x08;
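
The new kSlicedNotConsMask constant lets code that has already established that a string is indirect (cons or sliced) tell the two apart with a single bit test; the lithium-codegen-x64.cc hunk further down drops its local copy in favour of this shared definition. As a minimal illustration of the bit arithmetic, here is a JavaScript sketch using representation tag values assumed for this era of V8 (cons = 0x1, sliced = 0x3); the concrete values are an assumption, not part of this patch:

    // Assumed tag values (not taken from this patch).
    var kConsStringTag = 0x1;
    var kSlicedStringTag = 0x3;
    var kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;  // 0x2, a power of two

    // Only meaningful once the string is known to be indirect (cons or sliced).
    function isSlicedGivenIndirect(instanceType) {
      return (instanceType & kSlicedNotConsMask) !== 0;
    }
    // isSlicedGivenIndirect(kConsStringTag)   -> false
    // isSlicedGivenIndirect(kSlicedStringTag) -> true
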
diff --git a/src/parser.cc b/src/parser.cc
index 844dd70..615c6ea 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -1609,7 +1609,13 @@
   //   ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[',']
 
   Variable::Mode mode = Variable::VAR;
+  // True if the binding needs initialization. 'let' and 'const' declared
+  // bindings are created uninitialized by their declaration nodes and
+  // need initialization. 'var' declared bindings are always initialized
+  // immediately by their declaration nodes.
+  bool needs_init = false;
   bool is_const = false;
+  Token::Value init_op = Token::INIT_VAR;
   if (peek() == Token::VAR) {
     Consume(Token::VAR);
   } else if (peek() == Token::CONST) {
@@ -1621,6 +1627,8 @@
     }
     mode = Variable::CONST;
     is_const = true;
+    needs_init = true;
+    init_op = Token::INIT_CONST;
   } else if (peek() == Token::LET) {
     Consume(Token::LET);
     if (var_context != kSourceElement &&
@@ -1631,6 +1639,8 @@
       return NULL;
     }
     mode = Variable::LET;
+    needs_init = true;
+    init_op = Token::INIT_LET;
   } else {
     UNREACHABLE();  // by current callers
   }
@@ -1732,9 +1742,8 @@
       }
     }
 
-    // Make sure that 'const c' actually initializes 'c' to undefined
-    // even though it seems like a stupid thing to do.
-    if (value == NULL && is_const) {
+    // Make sure that 'const x' and 'let x' initialize 'x' to undefined.
+    if (value == NULL && needs_init) {
       value = GetLiteralUndefined();
     }
 
@@ -1822,12 +1831,11 @@
     // for constant lookups is always the function context, while it is
     // the top context for variables). Sigh...
     if (value != NULL) {
-      Token::Value op = (is_const ? Token::INIT_CONST : Token::INIT_VAR);
       bool in_with = is_const ? false : inside_with();
       VariableProxy* proxy =
           initialization_scope->NewUnresolved(name, in_with);
       Assignment* assignment =
-          new(zone()) Assignment(isolate(), op, proxy, value, position);
+          new(zone()) Assignment(isolate(), init_op, proxy, value, position);
       if (block) {
         block->AddStatement(new(zone()) ExpressionStatement(assignment));
       }
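
This parser change introduces needs_init and the new INIT_LET token so that a 'let x;' declaration (like 'const x;') is rewritten into an initializing assignment of undefined, while 'var x;' needs no such node. A minimal JavaScript sketch of the observable semantics, assuming the --harmony-block-scoping flag used by the new tests in this patch:

    // Run with --harmony-block-scoping (assumed flag for this era of V8).
    function f(early) {
      if (early) {
        // 'x' already exists in the scope but still holds the hole:
        return x;  // throws ReferenceError
      }
      let x;       // the parser emits an INIT_LET assignment of undefined here
      return x;    // undefined
    }
    // f(false) -> undefined; f(true) throws a ReferenceError.
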
diff --git a/src/runtime.cc b/src/runtime.cc
index 802fd68..9d3bb1d 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -1306,8 +1306,9 @@
   int index;
   PropertyAttributes attributes;
   ContextLookupFlags flags = DONT_FOLLOW_CHAINS;
+  BindingFlags binding_flags;
   Handle<Object> holder =
-      context->Lookup(name, flags, &index, &attributes);
+      context->Lookup(name, flags, &index, &attributes, &binding_flags);
 
   if (attributes != ABSENT) {
     // The name was declared before; check for conflicting
@@ -1594,8 +1595,9 @@
   int index;
   PropertyAttributes attributes;
   ContextLookupFlags flags = FOLLOW_CHAINS;
+  BindingFlags binding_flags;
   Handle<Object> holder =
-      context->Lookup(name, flags, &index, &attributes);
+      context->Lookup(name, flags, &index, &attributes, &binding_flags);
 
   // In most situations, the property introduced by the const
   // declaration should be present in the context extension object.
@@ -8386,7 +8388,12 @@
   int index;
   PropertyAttributes attributes;
   ContextLookupFlags flags = FOLLOW_CHAINS;
-  Handle<Object> holder = context->Lookup(name, flags, &index, &attributes);
+  BindingFlags binding_flags;
+  Handle<Object> holder = context->Lookup(name,
+                                          flags,
+                                          &index,
+                                          &attributes,
+                                          &binding_flags);
 
   // If the slot was not found the result is true.
   if (holder.is_null()) {
@@ -8488,7 +8495,12 @@
   int index;
   PropertyAttributes attributes;
   ContextLookupFlags flags = FOLLOW_CHAINS;
-  Handle<Object> holder = context->Lookup(name, flags, &index, &attributes);
+  BindingFlags binding_flags;
+  Handle<Object> holder = context->Lookup(name,
+                                          flags,
+                                          &index,
+                                          &attributes,
+                                          &binding_flags);
 
   // If the index is non-negative, the slot has been found in a local
   // variable or a parameter. Read it from the context object or the
@@ -8504,7 +8516,17 @@
     MaybeObject* value = (holder->IsContext())
         ? Context::cast(*holder)->get(index)
         : JSObject::cast(*holder)->GetElement(index);
-    return MakePair(Unhole(isolate->heap(), value, attributes), *receiver);
+    // Check for uninitialized bindings.
+    if (holder->IsContext() &&
+        binding_flags == MUTABLE_CHECK_INITIALIZED &&
+        value->IsTheHole()) {
+      Handle<Object> reference_error =
+          isolate->factory()->NewReferenceError("not_defined",
+                                                HandleVector(&name, 1));
+      return MakePair(isolate->Throw(*reference_error), NULL);
+    } else {
+      return MakePair(Unhole(isolate->heap(), value, attributes), *receiver);
+    }
   }
 
   // If the holder is found, we read the property from it.
@@ -8570,14 +8592,27 @@
   int index;
   PropertyAttributes attributes;
   ContextLookupFlags flags = FOLLOW_CHAINS;
-  Handle<Object> holder = context->Lookup(name, flags, &index, &attributes);
+  BindingFlags binding_flags;
+  Handle<Object> holder = context->Lookup(name,
+                                          flags,
+                                          &index,
+                                          &attributes,
+                                          &binding_flags);
 
   if (index >= 0) {
     if (holder->IsContext()) {
+      Handle<Context> context = Handle<Context>::cast(holder);
+      if (binding_flags == MUTABLE_CHECK_INITIALIZED &&
+          context->get(index)->IsTheHole()) {
+        Handle<Object> error =
+            isolate->factory()->NewReferenceError("not_defined",
+                                                  HandleVector(&name, 1));
+        return isolate->Throw(*error);
+      }
       // Ignore if read_only variable.
       if ((attributes & READ_ONLY) == 0) {
         // Context is a fixed array and set cannot fail.
-        Context::cast(*holder)->set(index, *value);
+        context->set(index, *value);
       } else if (strict_mode == kStrictMode) {
         // Setting read only property in strict mode.
         Handle<Object> error =
@@ -9029,10 +9064,13 @@
   // it is bound in the global context.
   int index = -1;
   PropertyAttributes attributes = ABSENT;
+  BindingFlags binding_flags;
   while (true) {
     receiver = context->Lookup(isolate->factory()->eval_symbol(),
                                FOLLOW_PROTOTYPE_CHAIN,
-                               &index, &attributes);
+                               &index,
+                               &attributes,
+                               &binding_flags);
     // Stop search when eval is found or when the global context is
     // reached.
     if (attributes != ABSENT || context->IsGlobalContext()) break;
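
The BindingFlags threaded through Context::Lookup let the context-slot runtime functions spot an uninitialized (hole-valued) binding flagged MUTABLE_CHECK_INITIALIZED and throw a reference error, covering dynamic lookups that generated code cannot check statically. A small JavaScript sketch of that dynamic path, assuming --harmony-block-scoping:

    // eval forces a dynamic (runtime) lookup, which now performs the hole check.
    function g() {
      var before;
      try {
        eval("x + 1");                         // read of an uninitialized let binding
      } catch (e) {
        before = e instanceof ReferenceError;  // true
      }
      let x = 0;
      return [before, eval("x + 1")];          // [true, 1]
    }
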
diff --git a/src/scanner-base.h b/src/scanner-base.h
index a7062a3..d68d240 100644
--- a/src/scanner-base.h
+++ b/src/scanner-base.h
@@ -362,7 +362,7 @@
   // Call this after setting source_ to the input.
   void Init() {
     // Set c0_ (one character ahead)
-    ASSERT(kCharacterLookaheadBufferSize == 1);
+    STATIC_ASSERT(kCharacterLookaheadBufferSize == 1);
     Advance();
     // Initialize current_ to not refer to a literal.
     current_.literal_chars = NULL;
diff --git a/src/stub-cache.h b/src/stub-cache.h
index a97a4cd..dd06a1c 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -357,7 +357,7 @@
     // shift are equal.  Shifting down the length field to get the
     // hash code would effectively throw away two bits of the hash
     // code.
-    ASSERT(kHeapObjectTagSize == String::kHashShift);
+    STATIC_ASSERT(kHeapObjectTagSize == String::kHashShift);
     // Compute the hash of the name (use entire hash field).
     ASSERT(name->HasHashCode());
     uint32_t field = name->hash_field();
diff --git a/src/token.h b/src/token.h
index 33af7fe..eb825c1 100644
--- a/src/token.h
+++ b/src/token.h
@@ -71,6 +71,7 @@
   /* this block of enum values being contiguous and sorted in the */    \
   /* same order! */                                                     \
   T(INIT_VAR, "=init_var", 2)  /* AST-use only. */                      \
+  T(INIT_LET, "=init_let", 2)  /* AST-use only. */                      \
   T(INIT_CONST, "=init_const", 2)  /* AST-use only. */                  \
   T(ASSIGN, "=", 2)                                                     \
   T(ASSIGN_BIT_OR, "|=", 2)                                             \
diff --git a/src/version.cc b/src/version.cc
index 3fd0fd5..a004ecc 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -33,9 +33,9 @@
 // NOTE these macros are used by the SCons build script so their names
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
-#define MINOR_VERSION     5
-#define BUILD_NUMBER      9
-#define PATCH_LEVEL       1
+#define MINOR_VERSION     6
+#define BUILD_NUMBER      0
+#define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
diff --git a/src/weakmap.js b/src/weakmap.js
index 70210b9..3d261e5 100644
--- a/src/weakmap.js
+++ b/src/weakmap.js
@@ -81,13 +81,16 @@
 // -------------------------------------------------------------------
 
 function SetupWeakMap() {
-  // Setup the WeakMap constructor function.
+  // Set up the WeakMap constructor function.
   %SetCode($WeakMap, WeakMapConstructor);
 
-  // Setup the WeakMap prototype object.
+  // Set up the WeakMap prototype object.
   %FunctionSetPrototype($WeakMap, new $WeakMap());
 
-  // Setup the non-enumerable functions on the WeakMap prototype object.
+  // Set up the constructor property on the WeakMap prototype object.
+  %SetProperty($WeakMap.prototype, "constructor", $WeakMap, DONT_ENUM);
+
+  // Set up the non-enumerable functions on the WeakMap prototype object.
   InstallFunctionsOnHiddenPrototype($WeakMap.prototype, DONT_ENUM, $Array(
     "get", WeakMapGet,
     "set", WeakMapSet,
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 0763989..7c6f7e3 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -139,7 +139,7 @@
     // rdi: constructor
     __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi
-    ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTag == 0);
     __ JumpIfSmi(rax, &rt_call);
     // rdi: constructor
     // rax: initial map (if proven valid below)
@@ -1283,7 +1283,7 @@
     // Initial map for the builtin Array functions should be maps.
     __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
-    ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTag == 0);
     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
     __ Check(not_smi, "Unexpected initial map for Array function");
     __ CmpObjectType(rbx, MAP_TYPE, rcx);
@@ -1317,7 +1317,7 @@
     // Initial map for the builtin Array function should be a map.
     __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
-    ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTag == 0);
     Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
     __ Check(not_smi, "Unexpected initial map for Array function");
     __ CmpObjectType(rbx, MAP_TYPE, rcx);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 0b785c5..9237a0a 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2470,8 +2470,8 @@
   // a sequential string or an external string.
   // In the case of a sliced string its offset has to be taken into account.
   Label cons_string, check_encoding;
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ cmpq(rbx, Immediate(kExternalStringTag));
   __ j(less, &cons_string, Label::kNear);
   __ j(equal, &runtime);
@@ -3903,8 +3903,8 @@
 
   // Handle non-flat strings.
   __ and_(result_, Immediate(kStringRepresentationMask));
-  STATIC_ASSERT((kConsStringTag < kExternalStringTag));
-  STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
   __ cmpb(result_, Immediate(kExternalStringTag));
   __ j(greater, &sliced_string);
   __ j(equal, &call_runtime_);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index dd35053..e77ec64 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -668,12 +668,12 @@
   switch (slot->type()) {
     case Slot::PARAMETER:
     case Slot::LOCAL:
-      if (mode == Variable::CONST) {
-        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
-        __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForAccumulatorValue(function);
         __ movq(Operand(rbp, SlotOffset(slot)), result_register());
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
+        __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
       }
       break;
 
@@ -692,16 +692,16 @@
         __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
         __ Check(not_equal, "Declaration in catch context.");
       }
-      if (mode == Variable::CONST) {
-        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
-        __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
-        // No write barrier since the hole value is in old space.
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForAccumulatorValue(function);
         __ movq(ContextOperand(rsi, slot->index()), result_register());
         int offset = Context::SlotOffset(slot->index());
         __ movq(rbx, rsi);
         __ RecordWrite(rbx, offset, result_register(), rcx);
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
+        __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
+        // No write barrier since the hole value is in old space.
       }
       break;
 
@@ -718,10 +718,10 @@
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
-      if (mode == Variable::CONST) {
-        __ PushRoot(Heap::kTheHoleValueRootIndex);
-      } else if (function != NULL) {
+      if (function != NULL) {
         VisitForStackValue(function);
+      } else if (mode == Variable::CONST || mode == Variable::LET) {
+        __ PushRoot(Heap::kTheHoleValueRootIndex);
       } else {
         __ Push(Smi::FromInt(0));  // no initial value!
       }
@@ -1246,6 +1246,18 @@
       __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
       __ bind(&done);
       context()->Plug(rax);
+    } else if (var->mode() == Variable::LET) {
+      // Let bindings may be the hole value if they have not been initialized.
+      // Throw a reference error in this case.
+      Label done;
+      MemOperand slot_operand = EmitSlotSearch(slot, rax);
+      __ movq(rax, slot_operand);
+      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+      __ j(not_equal, &done, Label::kNear);
+      __ Push(var->name());
+      __ CallRuntime(Runtime::kThrowReferenceError, 1);
+      __ bind(&done);
+      context()->Plug(rax);
     } else {
       context()->Plug(slot);
     }
@@ -1775,6 +1787,57 @@
     }
     __ bind(&skip);
 
+  } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+    // Perform the assignment for let variables.  Assigning to a let binding
+    // that is still uninitialized (the hole value) throws a reference error.
+    Slot* slot = var->AsSlot();
+    switch (slot->type()) {
+      case Slot::PARAMETER:
+      case Slot::LOCAL: {
+        Label assign;
+        // Check for an initialized let binding.
+        __ movq(rdx, Operand(rbp, SlotOffset(slot)));
+        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+        __ j(not_equal, &assign);
+        __ Push(var->name());
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+        // Perform the assignment.
+        __ bind(&assign);
+        __ movq(Operand(rbp, SlotOffset(slot)), rax);
+        break;
+      }
+
+      case Slot::CONTEXT: {
+        // Let variables may be the hole value if they have not been
+        // initialized. Throw a reference error in this case.
+        Label assign;
+        MemOperand target = EmitSlotSearch(slot, rcx);
+        // Check for an initialized let binding.
+        __ movq(rdx, target);
+        __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+        __ j(not_equal, &assign, Label::kNear);
+        __ Push(var->name());
+        __ CallRuntime(Runtime::kThrowReferenceError, 1);
+        // Perform the assignment.
+        __ bind(&assign);
+        __ movq(target, rax);
+        // The value of the assignment is in rax.  RecordWrite clobbers its
+        // register arguments.
+        __ movq(rdx, rax);
+        int offset = Context::SlotOffset(slot->index());
+        __ RecordWrite(rcx, offset, rdx, rbx);
+        break;
+      }
+
+      case Slot::LOOKUP:
+        // Call the runtime for the assignment.
+        __ push(rax);  // Value.
+        __ push(rsi);  // Context.
+        __ Push(var->name());
+        __ Push(Smi::FromInt(strict_mode_flag()));
+        __ CallRuntime(Runtime::kStoreContextSlot, 4);
+        break;
+    }
   } else if (var->mode() != Variable::CONST) {
     // Perform the assignment for non-const variables.  Const assignments
     // are simply skipped.
@@ -3075,7 +3138,7 @@
 
   Label done, not_found;
   // tmp now holds finger offset as a smi.
-  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
   __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
   SmiIndex index =
       __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
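
Taken together, the x64 full-codegen hunks make let declarations store the hole value (as const already did), and make loads and non-initializing stores check for the hole and call Runtime::kThrowReferenceError. A JavaScript sketch of both paths, assuming --harmony-block-scoping:

    function h(early) {
      if (early) {
        x = 1;      // store to an uninitialized let slot: throws ReferenceError
      }
      let x = 0;    // INIT_LET: the one store that is allowed to replace the hole
      x = 2;        // ordinary assignment after initialization succeeds
      return x;
    }
    // h(false) -> 2; h(true) throws a ReferenceError.
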
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 339d2c1..990c171 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -378,7 +378,7 @@
   __ j(zero, index_string);  // The value in hash is used at jump target.
 
   // Is the string a symbol?
-  ASSERT(kSymbolTag != 0);
+  STATIC_ASSERT(kSymbolTag != 0);
   __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
            Immediate(kIsSymbolMask));
   __ j(zero, not_symbol);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 76a9453..c182413 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -3217,8 +3217,6 @@
 
   // Dispatch on the indirect string shape: slice or cons.
   Label cons_string;
-  const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;
-  ASSERT(IsPowerOf2(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
   __ testb(result, Immediate(kSlicedNotConsMask));
   __ j(zero, &cons_string, Label::kNear);
 
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index e4a7627..b51d531 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -923,7 +923,7 @@
 
 
 void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movl(dst, src);
   }
@@ -961,7 +961,7 @@
 
 
 void MacroAssembler::SmiToInteger32(Register dst, Register src) {
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movq(dst, src);
   }
@@ -975,7 +975,7 @@
 
 
 void MacroAssembler::SmiToInteger64(Register dst, Register src) {
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   if (!dst.is(src)) {
     movq(dst, src);
   }
@@ -1111,21 +1111,21 @@
 
 
 Condition MacroAssembler::CheckSmi(Register src) {
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   testb(src, Immediate(kSmiTagMask));
   return zero;
 }
 
 
 Condition MacroAssembler::CheckSmi(const Operand& src) {
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   testb(src, Immediate(kSmiTagMask));
   return zero;
 }
 
 
 Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   // Test that both bits of the mask 0x8000000000000001 are zero.
   movq(kScratchRegister, src);
   rol(kScratchRegister, Immediate(1));
@@ -1138,7 +1138,7 @@
   if (first.is(second)) {
     return CheckSmi(first);
   }
-  ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
+  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
   leal(kScratchRegister, Operand(first, second, times_1, 0));
   testb(kScratchRegister, Immediate(0x03));
   return zero;
@@ -1294,7 +1294,7 @@
                                        Label::Distance near_jump) {
   // Does not assume that src is a smi.
   ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   ASSERT(!dst.is(kScratchRegister));
   ASSERT(!src.is(kScratchRegister));
 
@@ -1998,7 +1998,7 @@
     Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
   }
 #endif
-  ASSERT_EQ(0, kSmiTag);
+  STATIC_ASSERT(kSmiTag == 0);
   ASSERT_EQ(0, Smi::FromInt(0));
   movl(kScratchRegister, Immediate(kSmiTagMask));
   and_(kScratchRegister, src1);
@@ -2699,7 +2699,7 @@
                                              Register instance_type) {
   movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
   movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
-  ASSERT(kNotStringTag != 0);
+  STATIC_ASSERT(kNotStringTag != 0);
   testb(instance_type, Immediate(kIsNotStringMask));
   return zero;
 }
diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc
index 7f80447..a782bd7 100644
--- a/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/x64/regexp-macro-assembler-x64.cc
@@ -1185,7 +1185,7 @@
   MaybeObject* result = Execution::HandleStackGuardInterrupt();
 
   if (*code_handle != re_code) {  // Return address no longer valid
-    intptr_t delta = *code_handle - re_code;
+    intptr_t delta = code_handle->address() - re_code->address();
     // Overwrite the return address on the stack.
     *return_address += delta;
   }
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index cfd19bf..5ea7257 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -258,7 +258,7 @@
   // Check that the object is a string.
   __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
   __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
-  ASSERT(kNotStringTag != 0);
+  STATIC_ASSERT(kNotStringTag != 0);
   __ testl(scratch, Immediate(kNotStringTag));
   __ j(not_zero, non_string_object);
 }
@@ -3070,7 +3070,7 @@
   // Load the initial map and verify that it is in fact a map.
   __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
   // Will both indicate a NULL and a Smi.
-  ASSERT(kSmiTag == 0);
+  STATIC_ASSERT(kSmiTag == 0);
   __ JumpIfSmi(rbx, &generic_stub_call);
   __ CmpObjectType(rbx, MAP_TYPE, rcx);
   __ j(not_equal, &generic_stub_call);
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index f2af81e..c0b8a4e 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -823,7 +823,7 @@
 static v8::Handle<v8::Value> callback(const v8::Arguments& args) {
   void* ptr = v8::External::Unwrap(args.Data());
   CHECK_EQ(expected_ptr, ptr);
-  return v8::Boolean::New(true);
+  return v8::True();
 }
 
 
@@ -2609,7 +2609,7 @@
 
 
 v8::Handle<Value> CCatcher(const v8::Arguments& args) {
-  if (args.Length() < 1) return v8::Boolean::New(false);
+  if (args.Length() < 1) return v8::False();
   v8::HandleScope scope;
   v8::TryCatch try_catch;
   Local<Value> result = v8::Script::Compile(args[0]->ToString())->Run();
@@ -7296,7 +7296,7 @@
     CHECK(value->IsBoolean());
     CHECK_EQ(true, value->BooleanValue());
 
-    Handle<Value> args3[] = { v8::Boolean::New(true) };
+    Handle<Value> args3[] = { v8::True() };
     Local<Value> value_obj3 = instance->CallAsConstructor(1, args3);
     CHECK(value_obj3->IsObject());
     Local<Object> object3 = Local<Object>::Cast(value_obj3);
@@ -9567,10 +9567,7 @@
 
 static v8::Handle<Value> IsConstructHandler(const v8::Arguments& args) {
   ApiTestFuzzer::Fuzz();
-  if (args.IsConstructCall()) {
-    return v8::Boolean::New(true);
-  }
-  return v8::Boolean::New(false);
+  return v8::Boolean::New(args.IsConstructCall());
 }
 
 
diff --git a/test/cctest/test-assembler-arm.cc b/test/cctest/test-assembler-arm.cc
index 1703203..ecbf956 100644
--- a/test/cctest/test-assembler-arm.cc
+++ b/test/cctest/test-assembler-arm.cc
@@ -1010,4 +1010,18 @@
   CHECK_EQ(0xffffffff, i.d);
 }
 
+
+TEST(12) {
+  // Test chaining of label usages within instructions (issue 1644).
+  InitializeVM();
+  v8::HandleScope scope;
+  Assembler assm(Isolate::Current(), NULL, 0);
+
+  Label target;
+  __ b(eq, &target);
+  __ b(ne, &target);
+  __ bind(&target);
+  __ nop();
+}
+
 #undef __
diff --git a/test/cctest/test-assembler-ia32.cc b/test/cctest/test-assembler-ia32.cc
index e9d799b..839b7f5 100644
--- a/test/cctest/test-assembler-ia32.cc
+++ b/test/cctest/test-assembler-ia32.cc
@@ -394,4 +394,18 @@
   CHECK_EQ(kNaN, f(OS::nan_value(), 1.1));
 }
 
+
+TEST(AssemblerIa3210) {
+  // Test chaining of label usages within instructions (issue 1644).
+  InitializeVM();
+  v8::HandleScope scope;
+  Assembler assm(Isolate::Current(), NULL, 0);
+
+  Label target;
+  __ j(equal, &target);
+  __ j(not_equal, &target);
+  __ bind(&target);
+  __ nop();
+}
+
 #undef __
diff --git a/test/cctest/test-assembler-mips.cc b/test/cctest/test-assembler-mips.cc
index 065569d..ca11a2a 100644
--- a/test/cctest/test-assembler-mips.cc
+++ b/test/cctest/test-assembler-mips.cc
@@ -1259,4 +1259,18 @@
   }
 }
 
+
+TEST(MIPS15) {
+  // Test chaining of label usages within instructions (issue 1644).
+  InitializeVM();
+  v8::HandleScope scope;
+  Assembler assm(Isolate::Current(), NULL, 0);
+
+  Label target;
+  __ beq(v0, v1, &target);
+  __ bne(v0, v1, &target);
+  __ bind(&target);
+  __ nop();
+}
+
 #undef __
diff --git a/test/cctest/test-assembler-x64.cc b/test/cctest/test-assembler-x64.cc
index ea70f54..28f7c9b 100644
--- a/test/cctest/test-assembler-x64.cc
+++ b/test/cctest/test-assembler-x64.cc
@@ -46,6 +46,7 @@
 using v8::internal::byte;
 using v8::internal::greater;
 using v8::internal::less_equal;
+using v8::internal::equal;
 using v8::internal::not_equal;
 using v8::internal::r13;
 using v8::internal::r15;
@@ -345,4 +346,17 @@
   }
 }
 
+
+TEST(AssemblerX64LabelChaining) {
+  // Test chaining of label usages within instructions (issue 1644).
+  v8::HandleScope scope;
+  Assembler assm(Isolate::Current(), NULL, 0);
+
+  Label target;
+  __ j(equal, &target);
+  __ j(not_equal, &target);
+  __ bind(&target);
+  __ nop();
+}
+
 #undef __
diff --git a/test/mjsunit/harmony/block-let-crankshaft.js b/test/mjsunit/harmony/block-let-crankshaft.js
new file mode 100644
index 0000000..c2fb96b
--- /dev/null
+++ b/test/mjsunit/harmony/block-let-crankshaft.js
@@ -0,0 +1,63 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-block-scoping --allow-natives-syntax
+
+// Test that temporal dead zone semantics for function and block scoped
+// let bindings are handled by the optimizing compiler.
+
+function f(x, b) {
+  let y = (b ? y : x) + 42;
+  return y;
+}
+
+function g(x, b) {
+  {
+    let y = (b ? y : x) + 42;
+    return y;
+  }
+}
+
+for (var i=0; i<10; i++) {
+  f(i, false);
+  g(i, false);
+}
+
+%OptimizeFunctionOnNextCall(f);
+%OptimizeFunctionOnNextCall(g);
+
+try {
+  f(42, true);
+} catch (e) {
+  assertInstanceof(e, ReferenceError);
+}
+
+try {
+  g(42, true);
+} catch (e) {
+  assertInstanceof(e, ReferenceError);
+}
diff --git a/test/mjsunit/harmony/block-let-semantics.js b/test/mjsunit/harmony/block-let-semantics.js
new file mode 100644
index 0000000..198c3b4
--- /dev/null
+++ b/test/mjsunit/harmony/block-let-semantics.js
@@ -0,0 +1,138 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-block-scoping
+
+// Test temporal dead zone semantics of let bound variables in
+// function and block scopes.
+
+function TestFunctionLocal(s) {
+  try {
+    eval("(function(){" + s + "; })")();
+  } catch (e) {
+    assertInstanceof(e, ReferenceError);
+    return;
+  }
+  assertUnreachable();
+}
+
+function TestBlockLocal(s) {
+  try {
+    eval("(function(){ {" + s + ";} })")();
+  } catch (e) {
+    assertInstanceof(e, ReferenceError);
+    return;
+  }
+  assertUnreachable();
+}
+
+
+function TestAll(s) {
+  TestBlockLocal(s);
+  TestFunctionLocal(s);
+}
+
+// Use before initialization in declaration statement.
+TestAll('let x = x + 1');
+TestAll('let x = x += 1');
+TestAll('let x = x++');
+TestAll('let x = ++x');
+
+// Use before initialization in prior statement.
+TestAll('x + 1; let x;');
+TestAll('x = 1; let x;');
+TestAll('x += 1; let x;');
+TestAll('++x; let x;');
+TestAll('x++; let x;');
+
+TestAll('f(); let x; function f() { return x + 1; }');
+TestAll('f(); let x; function f() { x = 1; }');
+TestAll('f(); let x; function f() { x += 1; }');
+TestAll('f(); let x; function f() { ++x; }');
+TestAll('f(); let x; function f() { x++; }');
+
+TestAll('f()(); let x; function f() { return function() { return x + 1; } }');
+TestAll('f()(); let x; function f() { return function() { x = 1; } }');
+TestAll('f()(); let x; function f() { return function() { x += 1; } }');
+TestAll('f()(); let x; function f() { return function() { ++x; } }');
+TestAll('f()(); let x; function f() { return function() { x++; } }');
+
+// Use before initialization with a dynamic lookup.
+TestAll('eval("x + 1;"); let x;');
+TestAll('eval("x = 1;"); let x;');
+TestAll('eval("x += 1;"); let x;');
+TestAll('eval("++x;"); let x;');
+TestAll('eval("x++;"); let x;');
+
+// Test that variables introduced by function declarations are created and
+// initialized upon entering a function / block scope.
+function f() {
+  {
+    assertEquals(2, g1());
+    assertEquals(2, eval("g1()"));
+
+    // block scoped function declaration
+    function g1() {
+      return 2;
+    }
+  }
+
+  assertEquals(3, g2());
+  assertEquals(3, eval("g2()"));
+  // function scoped function declaration
+  function g2() {
+    return 3;
+  }
+}
+f();
+
+// Test that a function declaration introduces a block scoped variable.
+TestAll('{ function k() { return 0; } }; k(); ');
+
+// Test that a function declaration sees the scope it resides in.
+function f2() {
+  let m, n;
+  {
+    m = g;
+    function g() {
+      return a;
+    }
+    let a = 1;
+  }
+  assertEquals(1, m());
+
+  try {
+    throw 2;
+  } catch(b) {
+    n = h;
+    function h() {
+      return b + c;
+    }
+    let c = 3;
+  }
+  assertEquals(5, n());
+}
diff --git a/test/mjsunit/harmony/weakmaps.js b/test/mjsunit/harmony/weakmaps.js
index 97f553c..e43f916 100644
--- a/test/mjsunit/harmony/weakmaps.js
+++ b/test/mjsunit/harmony/weakmaps.js
@@ -137,6 +137,7 @@
 assertTrue(WeakMap.prototype.get instanceof Function)
 assertTrue(WeakMap.prototype.has instanceof Function)
 assertTrue(WeakMap.prototype.delete instanceof Function)
+assertTrue(WeakMap.prototype.constructor === WeakMap)
 
 
 // Regression test for issue 1617: The prototype of the WeakMap constructor
diff --git a/test/mjsunit/string-slices.js b/test/mjsunit/string-slices.js
index b0b05ec..f629ca9 100755
--- a/test/mjsunit/string-slices.js
+++ b/test/mjsunit/string-slices.js
@@ -72,7 +72,7 @@
 }
 /x/.exec(x);  // Try to force a flatten.
 for (var i = 5; i < 25; i++) {
-  for (var j = 12; j < 25; j++) {
+  for (var j = 0; j < 25; j++) {
     var z = x.substring(i, i+j);
     var w = Math.random() * 42;  // Allocate something new in new-space.
     assertEquals(j, z.length);
@@ -110,7 +110,7 @@
 x += x;
 var xl = x.length;
 var cache = [];
-for (var i = 0; i < 10000; i++) {
+for (var i = 0; i < 1000; i++) {
   var z = x.substring(i % xl);
   assertEquals(xl - (i % xl), z.length);
   cache.push(z);
@@ -129,7 +129,7 @@
 x += x;
 var xl = x.length;
 var cache = [];
-for (var i = 0; i < 10000; i++) {
+for (var i = 0; i < 1000; i++) {
   var z = x.substring(i % xl);
   assertEquals(xl - (i % xl), z.length);
   cache.push(z);
@@ -149,6 +149,7 @@
   var z = cache.pop();
   assertTrue(/\u2028123456789ABCDEF/.test(z));
   assertEquals(xl - offset, z.length);
+  assertEquals(x.charAt(i*(i+1)/2), z.charAt(0));
   offset -= i;
 }
 
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 56ebeed..2da8213 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -646,7 +646,7 @@
               ],
               'msvs_disabled_warnings': [4351, 4355, 4800],
               'link_settings':  {
-                'libraries': [ '-lwinmm.lib' ],
+                'libraries': [ '-lwinmm.lib', '-lws2_32.lib' ],
               },
             }],
             ['component=="shared_library"', {