Version 3.1.8.
Fixed a number of crash bugs.
Improved Crankshaft for x64 and ARM.
Implemented more of EcmaScript 5 strict mode.
Fixed issue with unaligned reads and writes on ARM.
Improved heap profiler support.
git-svn-id: http://v8.googlecode.com/svn/trunk@7023 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/ChangeLog b/ChangeLog
index e4b018c..7936058 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,10 +1,23 @@
+2011-03-02: Version 3.1.8
+
+ Fixed a number of crash bugs.
+
+ Improved Crankshaft for x64 and ARM.
+
+ Implemented more of EcmaScript 5 strict mode.
+
+ Fixed issue with unaligned reads and writes on ARM.
+
+ Improved heap profiler support.
+
+
2011-02-28: Version 3.1.7
Fixed a number of crash bugs.
Improved Crankshaft for x64 and ARM.
- Fixed implementation of indexOf/lastIndexOf for sparse
+ Fixed implementation of indexOf/lastIndexOf for sparse
arrays (http://crbug.com/73940).
Fixed bug in map space compaction (http://crbug.com/59688).
diff --git a/src/api.cc b/src/api.cc
index d718c88..b77e450 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -2286,7 +2286,8 @@
self,
key_obj,
value_obj,
- static_cast<PropertyAttributes>(attribs));
+ static_cast<PropertyAttributes>(attribs),
+ i::kNonStrictMode);
has_pending_exception = obj.is_null();
EXCEPTION_BAILOUT_CHECK(false);
return true;
@@ -2711,7 +2712,8 @@
hidden_props,
key_obj,
value_obj,
- static_cast<PropertyAttributes>(None));
+ static_cast<PropertyAttributes>(None),
+ i::kNonStrictMode);
has_pending_exception = obj.is_null();
EXCEPTION_BAILOUT_CHECK(false);
return true;
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 954b9cf..f5eb507 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -284,6 +284,7 @@
const SwVfpRegister s30 = { 30 };
const SwVfpRegister s31 = { 31 };
+const DwVfpRegister no_dreg = { -1 };
const DwVfpRegister d0 = { 0 };
const DwVfpRegister d1 = { 1 };
const DwVfpRegister d2 = { 2 };
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 7d374ee..62eb3e6 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -398,8 +398,11 @@
Label* not_number);
// Loads the number from object into dst as a 32-bit integer if possible. If
- // the object is not a 32-bit integer control continues at the label
- // not_int32. If VFP is supported double_scratch is used but not scratch2.
+ // the object cannot be converted to a 32-bit integer control continues at
+ // the label not_int32. If VFP is supported double_scratch is used
+ // but not scratch2.
+ // Floating point values in the 32-bit integer range will be rounded
+ // to an integer.
static void LoadNumberAsInteger(MacroAssembler* masm,
Register object,
Register dst,
@@ -409,6 +412,76 @@
DwVfpRegister double_scratch,
Label* not_int32);
+ // Load the number from object into double_dst in the double format.
+ // Control will jump to not_int32 if the value cannot be exactly represented
+ // by a 32-bit integer.
+ // Floating point values in the 32-bit integer range that are not exact integers
+ // won't be loaded.
+ static void LoadNumberAsInt32Double(MacroAssembler* masm,
+ Register object,
+ Destination destination,
+ DwVfpRegister double_dst,
+ Register dst1,
+ Register dst2,
+ Register heap_number_map,
+ Register scratch1,
+ Register scratch2,
+ SwVfpRegister single_scratch,
+ Label* not_int32);
+
+ // Loads the number from object into dst as a 32-bit integer.
+ // Control will jump to not_int32 if the object cannot be exactly represented
+ // by a 32-bit integer.
+ // Floating point values in the 32-bit integer range that are not exact integers
+ // won't be converted.
+ // scratch3 is not used when VFP3 is supported.
+ static void LoadNumberAsInt32(MacroAssembler* masm,
+ Register object,
+ Register dst,
+ Register heap_number_map,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ DwVfpRegister double_scratch,
+ Label* not_int32);
+
+ // Generate non VFP3 code to check if a double can be exactly represented by a
+ // 32-bit integer. This does not check for 0 or -0, which need
+ // to be checked for separately.
+ // Control jumps to not_int32 if the value is not a 32-bit integer, and falls
+ // through otherwise.
+ // src1 and src2 will be clobbered.
+ //
+ // Expected input:
+ // - src1: higher (exponent) part of the double value.
+ // - src2: lower (mantissa) part of the double value.
+ // Output status:
+ // - dst: 32 higher bits of the mantissa. (mantissa[51:20])
+ // - src2: contains 1.
+ // - other registers are clobbered.
+ static void DoubleIs32BitInteger(MacroAssembler* masm,
+ Register src1,
+ Register src2,
+ Register dst,
+ Register scratch,
+ Label* not_int32);
+
+ // Generates code to call a C function to do a double operation using core
+ // registers. (Used when VFP3 is not supported.)
+ // This code never falls through, but returns with a heap number containing
+ // the result in r0.
+ // Register heapnumber_result must be a heap number in which the
+ // result of the operation will be stored.
+ // Requires the following layout on entry:
+ // r0: Left value (least significant part of mantissa).
+ // r1: Left value (sign, exponent, top of mantissa).
+ // r2: Right value (least significant part of mantissa).
+ // r3: Right value (sign, exponent, top of mantissa).
+ static void CallCCodeForDoubleOperation(MacroAssembler* masm,
+ Token::Value op,
+ Register heap_number_result,
+ Register scratch);
+
private:
static void LoadNumber(MacroAssembler* masm,
FloatingPointHelper::Destination destination,
@@ -560,6 +633,319 @@
}
+void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
+ Register object,
+ Destination destination,
+ DwVfpRegister double_dst,
+ Register dst1,
+ Register dst2,
+ Register heap_number_map,
+ Register scratch1,
+ Register scratch2,
+ SwVfpRegister single_scratch,
+ Label* not_int32) {
+ ASSERT(!scratch1.is(object) && !scratch2.is(object));
+ ASSERT(!scratch1.is(scratch2));
+ ASSERT(!heap_number_map.is(object) &&
+ !heap_number_map.is(scratch1) &&
+ !heap_number_map.is(scratch2));
+
+ Label done, obj_is_not_smi;
+
+ __ JumpIfNotSmi(object, &obj_is_not_smi);
+ __ SmiUntag(scratch1, object);
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ __ vmov(single_scratch, scratch1);
+ __ vcvt_f64_s32(double_dst, single_scratch);
+ if (destination == kCoreRegisters) {
+ __ vmov(dst1, dst2, double_dst);
+ }
+ } else {
+ Label fewer_than_20_useful_bits;
+ // Expected output:
+ // | dst1 | dst2 |
+ // | s | exp | mantissa |
+
+ // Check for zero.
+ __ cmp(scratch1, Operand(0));
+ __ mov(dst1, scratch1);
+ __ mov(dst2, scratch1);
+ __ b(eq, &done);
+
+ // Preload the sign of the value.
+ __ and_(dst1, scratch1, Operand(HeapNumber::kSignMask), SetCC);
+ // Get the absolute value of the object (as an unsigned integer).
+ __ rsb(scratch1, scratch1, Operand(0), SetCC, mi);
+
+ // Get mantissa[51:20].
+
+ // Get the position of the first set bit.
+ __ CountLeadingZeros(dst2, scratch1, scratch2);
+ __ rsb(dst2, dst2, Operand(31));
+
+ // Set the exponent.
+ __ add(scratch2, dst2, Operand(HeapNumber::kExponentBias));
+ __ Bfi(dst1, scratch2, scratch2,
+ HeapNumber::kExponentShift, HeapNumber::kExponentBits);
+
+ // Clear the first non null bit.
+ __ mov(scratch2, Operand(1));
+ __ bic(scratch1, scratch1, Operand(scratch2, LSL, dst2));
+
+ __ cmp(dst2, Operand(HeapNumber::kMantissaBitsInTopWord));
+ // Get the number of bits to set in the lower part of the mantissa.
+ __ sub(scratch2, dst2, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
+ __ b(mi, &fewer_than_20_useful_bits);
+ // Set the higher 20 bits of the mantissa.
+ __ orr(dst1, dst1, Operand(scratch1, LSR, scratch2));
+ __ rsb(scratch2, scratch2, Operand(32));
+ __ mov(dst2, Operand(scratch1, LSL, scratch2));
+ __ b(&done);
+
+ __ bind(&fewer_than_20_useful_bits);
+ __ rsb(scratch2, dst2, Operand(HeapNumber::kMantissaBitsInTopWord));
+ __ mov(scratch2, Operand(scratch1, LSL, scratch2));
+ __ orr(dst1, dst1, scratch2);
+ // Set dst2 to 0.
+ __ mov(dst2, Operand(0));
+ }
+
+ __ b(&done);
+
+ __ bind(&obj_is_not_smi);
+ if (FLAG_debug_code) {
+ __ AbortIfNotRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
+ }
+ __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
+
+ // Load the number.
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ // Load the double value.
+ __ sub(scratch1, object, Operand(kHeapObjectTag));
+ __ vldr(double_dst, scratch1, HeapNumber::kValueOffset);
+
+ __ EmitVFPTruncate(kRoundToZero,
+ single_scratch,
+ double_dst,
+ scratch1,
+ scratch2,
+ kCheckForInexactConversion);
+
+ // Jump to not_int32 if the operation did not succeed.
+ __ b(ne, not_int32);
+
+ if (destination == kCoreRegisters) {
+ __ vmov(dst1, dst2, double_dst);
+ }
+
+ } else {
+ ASSERT(!scratch1.is(object) && !scratch2.is(object));
+ // Load the double value in the destination registers.
+ __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset));
+
+ // Check for 0 and -0.
+ __ bic(scratch1, dst1, Operand(HeapNumber::kSignMask));
+ __ orr(scratch1, scratch1, Operand(dst2));
+ __ cmp(scratch1, Operand(0));
+ __ b(eq, &done);
+
+ // Check that the value can be exactly represented by a 32-bit integer.
+ // Jump to not_int32 if that's not the case.
+ DoubleIs32BitInteger(masm, dst1, dst2, scratch1, scratch2, not_int32);
+
+ // dst1 and dst2 were trashed. Reload the double value.
+ __ Ldrd(dst1, dst2, FieldMemOperand(object, HeapNumber::kValueOffset));
+ }
+
+ __ bind(&done);
+}
+
+
+void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
+ Register object,
+ Register dst,
+ Register heap_number_map,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ DwVfpRegister double_scratch,
+ Label* not_int32) {
+ ASSERT(!dst.is(object));
+ ASSERT(!scratch1.is(object) && !scratch2.is(object) && !scratch3.is(object));
+ ASSERT(!scratch1.is(scratch2) &&
+ !scratch1.is(scratch3) &&
+ !scratch2.is(scratch3));
+
+ Label done;
+
+ // Untag the object into the destination register.
+ __ SmiUntag(dst, object);
+ // Just return if the object is a smi.
+ __ JumpIfSmi(object, &done);
+
+ if (FLAG_debug_code) {
+ __ AbortIfNotRootValue(heap_number_map,
+ Heap::kHeapNumberMapRootIndex,
+ "HeapNumberMap register clobbered.");
+ }
+ __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
+
+ // Object is a heap number.
+ // Convert the floating point value to a 32-bit integer.
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ SwVfpRegister single_scratch = double_scratch.low();
+ // Load the double value.
+ __ sub(scratch1, object, Operand(kHeapObjectTag));
+ __ vldr(double_scratch, scratch1, HeapNumber::kValueOffset);
+
+ __ EmitVFPTruncate(kRoundToZero,
+ single_scratch,
+ double_scratch,
+ scratch1,
+ scratch2,
+ kCheckForInexactConversion);
+
+ // Jump to not_int32 if the operation did not succeed.
+ __ b(ne, not_int32);
+ // Get the result in the destination register.
+ __ vmov(dst, single_scratch);
+
+ } else {
+ // Load the double value in the destination registers.
+ __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
+ __ ldr(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset));
+
+ // Check for 0 and -0.
+ __ bic(dst, scratch1, Operand(HeapNumber::kSignMask));
+ __ orr(dst, scratch2, Operand(dst));
+ __ cmp(dst, Operand(0));
+ __ b(eq, &done);
+
+ DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32);
+
+ // Registers state after DoubleIs32BitInteger.
+ // dst: mantissa[51:20].
+ // scratch2: 1
+
+ // Shift back the higher bits of the mantissa.
+ __ mov(dst, Operand(dst, LSR, scratch3));
+ // Set the implicit first bit.
+ __ rsb(scratch3, scratch3, Operand(32));
+ __ orr(dst, dst, Operand(scratch2, LSL, scratch3));
+ // Set the sign.
+ __ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
+ __ tst(scratch1, Operand(HeapNumber::kSignMask));
+ __ rsb(dst, dst, Operand(0), LeaveCC, mi);
+ }
+
+ __ bind(&done);
+}
+
+
+void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm,
+ Register src1,
+ Register src2,
+ Register dst,
+ Register scratch,
+ Label* not_int32) {
+ // Get exponent alone in scratch.
+ __ Ubfx(scratch,
+ src1,
+ HeapNumber::kExponentShift,
+ HeapNumber::kExponentBits);
+
+ // Subtract the bias from the exponent.
+ __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias), SetCC);
+
+ // src1: higher (exponent) part of the double value.
+ // src2: lower (mantissa) part of the double value.
+ // scratch: unbiased exponent.
+
+ // Fast cases. Check for obvious non 32-bit integer values.
+ // Negative exponent cannot yield 32-bit integers.
+ __ b(mi, not_int32);
+ // Exponent greater than 31 cannot yield 32-bit integers.
+ // Also, a positive value with an exponent equal to 31 is outside of the
+ // signed 32-bit integer range.
+ __ tst(src1, Operand(HeapNumber::kSignMask));
+ __ cmp(scratch, Operand(30), eq); // Executed for positive. If exponent is 30
+ // the gt condition will be "correct" and
+ // the next instruction will be skipped.
+ __ cmp(scratch, Operand(31), ne); // Executed for negative and positive where
+ // exponent is not 30.
+ __ b(gt, not_int32);
+ // - Bits [21:0] in the mantissa are not null.
+ __ tst(src2, Operand(0x3fffff));
+ __ b(ne, not_int32);
+
+ // Otherwise the exponent needs to be big enough to shift left all the
+ // non zero bits left. So we need the (30 - exponent) last bits of the
+ // 31 higher bits of the mantissa to be null.
+ // Because bits [21:0] are null, we can check instead that the
+ // (32 - exponent) last bits of the 32 higher bits of the mantissa are null.
+
+ // Get the 32 higher bits of the mantissa in dst.
+ __ Ubfx(dst,
+ src2,
+ HeapNumber::kMantissaBitsInTopWord,
+ 32 - HeapNumber::kMantissaBitsInTopWord);
+ __ orr(dst,
+ dst,
+ Operand(src1, LSL, HeapNumber::kNonMantissaBitsInTopWord));
+
+ // Create the mask and test the lower bits (of the higher bits).
+ __ rsb(scratch, scratch, Operand(32));
+ __ mov(src2, Operand(1));
+ __ mov(src1, Operand(src2, LSL, scratch));
+ __ sub(src1, src1, Operand(1));
+ __ tst(dst, src1);
+ __ b(ne, not_int32);
+}
+
+
+void FloatingPointHelper::CallCCodeForDoubleOperation(
+ MacroAssembler* masm,
+ Token::Value op,
+ Register heap_number_result,
+ Register scratch) {
+ // Using core registers:
+ // r0: Left value (least significant part of mantissa).
+ // r1: Left value (sign, exponent, top of mantissa).
+ // r2: Right value (least significant part of mantissa).
+ // r3: Right value (sign, exponent, top of mantissa).
+
+ // Assert that heap_number_result is callee-saved.
+ // We currently always use r5 to pass it.
+ ASSERT(heap_number_result.is(r5));
+
+ // Push the current return address before the C call. Return will be
+ // through pop(pc) below.
+ __ push(lr);
+ __ PrepareCallCFunction(4, scratch); // Two doubles are 4 arguments.
+ // Call C routine that may not cause GC or other trouble.
+ __ CallCFunction(ExternalReference::double_fp_operation(op), 4);
+ // Store answer in the overwritable heap number.
+#if !defined(USE_ARM_EABI)
+ // Double returned in fp coprocessor register 0 and 1, encoded as
+ // register cr8. Offsets must be divisible by 4 for coprocessor so we
+ // need to subtract the tag from heap_number_result.
+ __ sub(scratch, heap_number_result, Operand(kHeapObjectTag));
+ __ stc(p1, cr8, MemOperand(scratch, HeapNumber::kValueOffset));
+#else
+ // Double returned in registers 0 and 1.
+ __ Strd(r0, r1, FieldMemOperand(heap_number_result,
+ HeapNumber::kValueOffset));
+#endif
+ // Place heap_number_result in r0 and return to the pushed return address.
+ __ mov(r0, Operand(heap_number_result));
+ __ pop(pc);
+}
+
// See comment for class.
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
@@ -2707,33 +3093,11 @@
__ add(r0, r0, Operand(kHeapObjectTag));
__ Ret();
} else {
- // Using core registers:
- // r0: Left value (least significant part of mantissa).
- // r1: Left value (sign, exponent, top of mantissa).
- // r2: Right value (least significant part of mantissa).
- // r3: Right value (sign, exponent, top of mantissa).
-
- // Push the current return address before the C call. Return will be
- // through pop(pc) below.
- __ push(lr);
- __ PrepareCallCFunction(4, scratch1); // Two doubles are 4 arguments.
- // Call C routine that may not cause GC or other trouble. r5 is callee
- // save.
- __ CallCFunction(ExternalReference::double_fp_operation(op_), 4);
- // Store answer in the overwritable heap number.
-#if !defined(USE_ARM_EABI)
- // Double returned in fp coprocessor register 0 and 1, encoded as
- // register cr8. Offsets must be divisible by 4 for coprocessor so we
- // need to substract the tag from r5.
- __ sub(scratch1, result, Operand(kHeapObjectTag));
- __ stc(p1, cr8, MemOperand(scratch1, HeapNumber::kValueOffset));
-#else
- // Double returned in registers 0 and 1.
- __ Strd(r0, r1, FieldMemOperand(result, HeapNumber::kValueOffset));
-#endif
- // Plase result in r0 and return to the pushed return address.
- __ mov(r0, Operand(result));
- __ pop(pc);
+ // Call the C function to handle the double operation.
+ FloatingPointHelper::CallCCodeForDoubleOperation(masm,
+ op_,
+ result,
+ scratch1);
}
break;
}
@@ -2779,7 +3143,6 @@
break;
case Token::SAR:
// Use only the 5 least significant bits of the shift count.
- __ and_(r2, r2, Operand(0x1f));
__ GetLeastBitsFromInt32(r2, r2, 5);
__ mov(r2, Operand(r3, ASR, r2));
break;
@@ -2924,7 +3287,288 @@
void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
ASSERT(operands_type_ == TRBinaryOpIC::INT32);
- GenerateTypeTransition(masm);
+ Register left = r1;
+ Register right = r0;
+ Register scratch1 = r7;
+ Register scratch2 = r9;
+ DwVfpRegister double_scratch = d0;
+ SwVfpRegister single_scratch = s3;
+
+ Register heap_number_result = no_reg;
+ Register heap_number_map = r6;
+ __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
+
+ Label call_runtime;
+ // Labels for type transition, used for wrong input or output types.
+ // Both labels are currently bound to the same position. We use two
+ // different labels to differentiate the cause leading to the type transition.
+ Label transition;
+
+ // Smi-smi fast case.
+ Label skip;
+ __ orr(scratch1, left, right);
+ __ JumpIfNotSmi(scratch1, &skip);
+ GenerateSmiSmiOperation(masm);
+ // Fall through if the result is not a smi.
+ __ bind(&skip);
+
+ switch (op_) {
+ case Token::ADD:
+ case Token::SUB:
+ case Token::MUL:
+ case Token::DIV:
+ case Token::MOD: {
+ // Load both operands and check that they are 32-bit integer.
+ // Jump to type transition if they are not. The registers r0 and r1 (right
+ // and left) are preserved for the runtime call.
+ FloatingPointHelper::Destination destination =
+ CpuFeatures::IsSupported(VFP3) && op_ != Token::MOD ?
+ FloatingPointHelper::kVFPRegisters :
+ FloatingPointHelper::kCoreRegisters;
+
+ FloatingPointHelper::LoadNumberAsInt32Double(masm,
+ right,
+ destination,
+ d7,
+ r2,
+ r3,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ s0,
+ &transition);
+ FloatingPointHelper::LoadNumberAsInt32Double(masm,
+ left,
+ destination,
+ d6,
+ r4,
+ r5,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ s0,
+ &transition);
+
+ if (destination == FloatingPointHelper::kVFPRegisters) {
+ CpuFeatures::Scope scope(VFP3);
+ Label return_heap_number;
+ switch (op_) {
+ case Token::ADD:
+ __ vadd(d5, d6, d7);
+ break;
+ case Token::SUB:
+ __ vsub(d5, d6, d7);
+ break;
+ case Token::MUL:
+ __ vmul(d5, d6, d7);
+ break;
+ case Token::DIV:
+ __ vdiv(d5, d6, d7);
+ break;
+ default:
+ UNREACHABLE();
+ }
+
+ if (op_ != Token::DIV) {
+ // These operations produce an integer result.
+ // Try to return a smi if we can.
+ // Otherwise return a heap number if allowed, or jump to type
+ // transition.
+
+ __ EmitVFPTruncate(kRoundToZero,
+ single_scratch,
+ d5,
+ scratch1,
+ scratch2);
+
+ if (result_type_ <= TRBinaryOpIC::INT32) {
+ // If the ne condition is set, result does
+ // not fit in a 32-bit integer.
+ __ b(ne, &transition);
+ }
+
+ // Check if the result fits in a smi.
+ __ vmov(scratch1, single_scratch);
+ __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
+ // If not try to return a heap number.
+ __ b(mi, &return_heap_number);
+ // Tag the result and return.
+ __ SmiTag(r0, scratch1);
+ __ Ret();
+ }
+
+ if (result_type_ >= (op_ == Token::DIV) ? TRBinaryOpIC::HEAP_NUMBER
+ : TRBinaryOpIC::INT32) {
+ __ bind(&return_heap_number);
+ // We are using vfp registers so r5 is available.
+ heap_number_result = r5;
+ GenerateHeapResultAllocation(masm,
+ heap_number_result,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ &call_runtime);
+ __ sub(r0, heap_number_result, Operand(kHeapObjectTag));
+ __ vstr(d5, r0, HeapNumber::kValueOffset);
+ __ mov(r0, heap_number_result);
+ __ Ret();
+ }
+
+ // A DIV operation expecting an integer result falls through
+ // to type transition.
+
+ } else {
+ // We preserved r0 and r1 to be able to call runtime.
+ // Save the left value on the stack.
+ __ Push(r5, r4);
+
+ // Allocate a heap number to store the result.
+ heap_number_result = r5;
+ GenerateHeapResultAllocation(masm,
+ heap_number_result,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ &call_runtime);
+
+ // Load the left value from the value saved on the stack.
+ __ Pop(r1, r0);
+
+ // Call the C function to handle the double operation.
+ FloatingPointHelper::CallCCodeForDoubleOperation(
+ masm, op_, heap_number_result, scratch1);
+ }
+
+ break;
+ }
+
+ case Token::BIT_OR:
+ case Token::BIT_XOR:
+ case Token::BIT_AND:
+ case Token::SAR:
+ case Token::SHR:
+ case Token::SHL: {
+ Label return_heap_number;
+ Register scratch3 = r5;
+ // Convert operands to 32-bit integers. Right in r2 and left in r3. The
+ // registers r0 and r1 (right and left) are preserved for the runtime
+ // call.
+ FloatingPointHelper::LoadNumberAsInt32(masm,
+ left,
+ r3,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ scratch3,
+ d0,
+ &transition);
+ FloatingPointHelper::LoadNumberAsInt32(masm,
+ right,
+ r2,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ scratch3,
+ d0,
+ &transition);
+
+ // The ECMA-262 standard specifies that, for shift operations, only the
+ // 5 least significant bits of the shift value should be used.
+ switch (op_) {
+ case Token::BIT_OR:
+ __ orr(r2, r3, Operand(r2));
+ break;
+ case Token::BIT_XOR:
+ __ eor(r2, r3, Operand(r2));
+ break;
+ case Token::BIT_AND:
+ __ and_(r2, r3, Operand(r2));
+ break;
+ case Token::SAR:
+ __ and_(r2, r2, Operand(0x1f));
+ __ mov(r2, Operand(r3, ASR, r2));
+ break;
+ case Token::SHR:
+ __ and_(r2, r2, Operand(0x1f));
+ __ mov(r2, Operand(r3, LSR, r2), SetCC);
+ // SHR is special because it is required to produce a positive answer.
+ // We only get a negative result if the shift value (r2) is 0.
+ // This result cannot be represented as a signed 32-bit integer, try
+ // to return a heap number if we can.
+ // The non vfp3 code does not support this special case, so jump to
+ // runtime if we don't support it.
+ if (CpuFeatures::IsSupported(VFP3)) {
+ __ b(mi,
+ (result_type_ <= TRBinaryOpIC::INT32) ? &transition
+ : &return_heap_number);
+ } else {
+ __ b(mi, (result_type_ <= TRBinaryOpIC::INT32) ? &transition
+ : &call_runtime);
+ }
+ break;
+ case Token::SHL:
+ __ and_(r2, r2, Operand(0x1f));
+ __ mov(r2, Operand(r3, LSL, r2));
+ break;
+ default:
+ UNREACHABLE();
+ }
+
+ // Check if the result fits in a smi.
+ __ add(scratch1, r2, Operand(0x40000000), SetCC);
+ // If not try to return a heap number. (We know the result is an int32.)
+ __ b(mi, &return_heap_number);
+ // Tag the result and return.
+ __ SmiTag(r0, r2);
+ __ Ret();
+
+ __ bind(&return_heap_number);
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ heap_number_result = r5;
+ GenerateHeapResultAllocation(masm,
+ heap_number_result,
+ heap_number_map,
+ scratch1,
+ scratch2,
+ &call_runtime);
+
+ if (op_ != Token::SHR) {
+ // Convert the result to a floating point value.
+ __ vmov(double_scratch.low(), r2);
+ __ vcvt_f64_s32(double_scratch, double_scratch.low());
+ } else {
+ // The result must be interpreted as an unsigned 32-bit integer.
+ __ vmov(double_scratch.low(), r2);
+ __ vcvt_f64_u32(double_scratch, double_scratch.low());
+ }
+
+ // Store the result.
+ __ sub(r0, heap_number_result, Operand(kHeapObjectTag));
+ __ vstr(double_scratch, r0, HeapNumber::kValueOffset);
+ __ mov(r0, heap_number_result);
+ __ Ret();
+ } else {
+ // Tail call that writes the int32 in r2 to the heap number in r0, using
+ // r3 as scratch. r0 is preserved and returned.
+ WriteInt32ToHeapNumberStub stub(r2, r0, r3);
+ __ TailCallStub(&stub);
+ }
+
+ break;
+ }
+
+ default:
+ UNREACHABLE();
+ }
+
+ if (transition.is_linked()) {
+ __ bind(&transition);
+ GenerateTypeTransition(masm);
+ }
+
+ __ bind(&call_runtime);
+ GenerateCallRuntime(masm);
}
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 3e125a3..8bb576d 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -1938,8 +1938,9 @@
frame_->EmitPush(cp);
frame_->EmitPush(Operand(pairs));
frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
+ frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
- frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
+ frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
// The result is discarded.
}
@@ -3287,7 +3288,8 @@
// context slot followed by initialization.
frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
} else {
- frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
+ frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
+ frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
}
// Storing a variable must keep the (new) value on the expression
// stack. This is necessary for compiling assignment expressions.
@@ -3637,7 +3639,8 @@
Load(key);
Load(value);
if (property->emit_store()) {
- frame_->CallRuntime(Runtime::kSetProperty, 3);
+ frame_->EmitPush(Operand(Smi::FromInt(NONE))); // PropertyAttributes
+ frame_->CallRuntime(Runtime::kSetProperty, 4);
} else {
frame_->Drop(3);
}
@@ -5170,11 +5173,11 @@
// Set the bit in the map to indicate that it has been checked safe for
// default valueOf and set true result.
- __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
+ __ ldrb(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
__ orr(scratch1_,
scratch1_,
Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
- __ str(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
+ __ strb(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
__ mov(map_result_, Operand(1));
__ jmp(exit_label());
__ bind(&false_result);
@@ -6674,8 +6677,12 @@
public:
DeferredReferenceSetKeyedValue(Register value,
Register key,
- Register receiver)
- : value_(value), key_(key), receiver_(receiver) {
+ Register receiver,
+ StrictModeFlag strict_mode)
+ : value_(value),
+ key_(key),
+ receiver_(receiver),
+ strict_mode_(strict_mode) {
set_comment("[ DeferredReferenceSetKeyedValue");
}
@@ -6685,6 +6692,7 @@
Register value_;
Register key_;
Register receiver_;
+ StrictModeFlag strict_mode_;
};
@@ -6706,7 +6714,9 @@
{ Assembler::BlockConstPoolScope block_const_pool(masm_);
// Call keyed store IC. It has the arguments value, key and receiver in r0,
// r1 and r2.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
// The call must be followed by a nop instruction to indicate that the
// keyed store has been inlined.
@@ -6724,8 +6734,12 @@
public:
DeferredReferenceSetNamedValue(Register value,
Register receiver,
- Handle<String> name)
- : value_(value), receiver_(receiver), name_(name) {
+ Handle<String> name,
+ StrictModeFlag strict_mode)
+ : value_(value),
+ receiver_(receiver),
+ name_(name),
+ strict_mode_(strict_mode) {
set_comment("[ DeferredReferenceSetNamedValue");
}
@@ -6735,6 +6749,7 @@
Register value_;
Register receiver_;
Handle<String> name_;
+ StrictModeFlag strict_mode_;
};
@@ -6754,7 +6769,9 @@
{ Assembler::BlockConstPoolScope block_const_pool(masm_);
// Call keyed store IC. It has the arguments value, key and receiver in r0,
// r1 and r2.
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode_ == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
// The call must be followed by a nop instruction to indicate that the
// named store has been inlined.
@@ -6943,7 +6960,8 @@
Register receiver = r1;
DeferredReferenceSetNamedValue* deferred =
- new DeferredReferenceSetNamedValue(value, receiver, name);
+ new DeferredReferenceSetNamedValue(
+ value, receiver, name, strict_mode_flag());
// Check that the receiver is a heap object.
__ tst(receiver, Operand(kSmiTagMask));
@@ -7129,7 +7147,8 @@
// The deferred code expects value, key and receiver in registers.
DeferredReferenceSetKeyedValue* deferred =
- new DeferredReferenceSetKeyedValue(value, key, receiver);
+ new DeferredReferenceSetKeyedValue(
+ value, key, receiver, strict_mode_flag());
// Check that the value is a smi. As this inlined code does not set the
// write barrier it is only possible to store smi values.
@@ -7214,7 +7233,7 @@
deferred->BindExit();
} else {
- frame()->CallKeyedStoreIC();
+ frame()->CallKeyedStoreIC(strict_mode_flag());
}
}
diff --git a/src/arm/constants-arm.h b/src/arm/constants-arm.h
index 7ac38ed..e6033a8 100644
--- a/src/arm/constants-arm.h
+++ b/src/arm/constants-arm.h
@@ -385,7 +385,10 @@
kDefaultRoundToZero = 1
};
+// This mask does not include the "inexact" or "input denormal" cumulative
+// exceptions flags, because we usually don't want to check for it.
static const uint32_t kVFPExceptionMask = 0xf;
+static const uint32_t kVFPInexactExceptionBit = 1 << 4;
static const uint32_t kVFPFlushToZeroMask = 1 << 24;
static const uint32_t kVFPInvalidExceptionBit = 1;
@@ -411,6 +414,11 @@
static const uint32_t kVFPRoundingModeMask = 3 << 22;
+enum CheckForInexactConversion {
+ kCheckForInexactConversion,
+ kDontCheckForInexactConversion
+};
+
// -----------------------------------------------------------------------------
// Hints.
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 9f521fb..7a47644 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -339,23 +339,6 @@
}
-FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
- Token::Value op, Expression* left, Expression* right) {
- ASSERT(ShouldInlineSmiCase(op));
- if (op == Token::DIV || op == Token::MOD || op == Token::MUL) {
- // We never generate inlined constant smi operations for these.
- return kNoConstants;
- } else if (right->IsSmiLiteral()) {
- return kRightConstant;
- } else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) {
- // Don't inline shifts with constant left hand side.
- return kLeftConstant;
- } else {
- return kNoConstants;
- }
-}
-
-
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
@@ -793,7 +776,9 @@
prop->key()->AsLiteral()->handle()->IsSmi());
__ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(is_strict()
+ ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// Value in r0 is ignored (declarations are statements).
}
@@ -809,10 +794,11 @@
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
- __ mov(r1, Operand(pairs));
- __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
- __ Push(cp, r1, r0);
- __ CallRuntime(Runtime::kDeclareGlobals, 3);
+ __ mov(r2, Operand(pairs));
+ __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r2, r1, r0);
+ __ CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
@@ -1456,7 +1442,9 @@
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
- __ CallRuntime(Runtime::kSetProperty, 3);
+ __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
+ __ push(r0);
+ __ CallRuntime(Runtime::kSetProperty, 4);
} else {
__ Drop(3);
}
@@ -1634,14 +1622,8 @@
}
Token::Value op = expr->binary_op();
- ConstantOperand constant = ShouldInlineSmiCase(op)
- ? GetConstantOperand(op, expr->target(), expr->value())
- : kNoConstants;
- ASSERT(constant == kRightConstant || constant == kNoConstants);
- if (constant == kNoConstants) {
- __ push(r0); // Left operand goes on the stack.
- VisitForAccumulatorValue(expr->value());
- }
+ __ push(r0); // Left operand goes on the stack.
+ VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
@@ -1653,8 +1635,7 @@
op,
mode,
expr->target(),
- expr->value(),
- constant);
+ expr->value());
} else {
EmitBinaryOp(op, mode);
}
@@ -1704,217 +1685,11 @@
}
-void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value) {
- Label call_stub, done;
- // Optimistically add smi value with unknown object. If result overflows or is
- // not a smi then we had either a smi overflow or added a smi with a tagged
- // pointer.
- __ mov(r1, Operand(value));
- __ add(r2, r0, r1, SetCC);
- __ b(vs, &call_stub);
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfNotSmi(r2, &call_stub);
- __ mov(r0, r2);
- __ b(&done);
-
- // Call the shared stub.
- __ bind(&call_stub);
- if (!left_is_constant_smi) {
- __ Swap(r0, r1, r2);
- }
- TypeRecordingBinaryOpStub stub(Token::ADD, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
-
- __ bind(&done);
- context()->Plug(r0);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value) {
- Label call_stub, done;
- // Optimistically subtract smi value and unknown object. If result overflows
- // or is not a smi then we had either a smi overflow or subtraction between a
- // smi and a tagged pointer.
- __ mov(r1, Operand(value));
- if (left_is_constant_smi) {
- __ sub(r2, r1, r0, SetCC);
- } else {
- __ sub(r2, r0, r1, SetCC);
- }
- __ b(vs, &call_stub);
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfNotSmi(r2, &call_stub);
- __ mov(r0, r2);
- __ b(&done);
-
- // Call the shared stub.
- __ bind(&call_stub);
- if (!left_is_constant_smi) {
- __ Swap(r0, r1, r2);
- }
- TypeRecordingBinaryOpStub stub(Token::SUB, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
-
- __ bind(&done);
- context()->Plug(r0);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- Smi* value) {
- Label call_stub, smi_case, done;
- int shift_value = value->value() & 0x1f;
-
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfSmi(r0, &smi_case);
-
- // Call stub.
- __ bind(&call_stub);
- __ mov(r1, r0);
- __ mov(r0, Operand(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
- __ b(&done);
-
- // Smi case.
- __ bind(&smi_case);
- switch (op) {
- case Token::SHL:
- if (shift_value != 0) {
- __ mov(r1, r0);
- if (shift_value > 1) {
- __ mov(r1, Operand(r1, LSL, shift_value - 1));
- }
- // Convert int result to smi, checking that it is in int range.
- __ SmiTag(r1, SetCC);
- __ b(vs, &call_stub);
- __ mov(r0, r1); // Put result back into r0.
- }
- break;
- case Token::SAR:
- if (shift_value != 0) {
- __ mov(r0, Operand(r0, ASR, shift_value));
- __ bic(r0, r0, Operand(kSmiTagMask));
- }
- break;
- case Token::SHR:
- // SHR must return a positive value. When shifting by 0 or 1 we need to
- // check that smi tagging the result will not create a negative value.
- if (shift_value < 2) {
- __ mov(r2, Operand(shift_value));
- __ SmiUntag(r1, r0);
- if (shift_value != 0) {
- __ mov(r1, Operand(r1, LSR, shift_value));
- }
- __ tst(r1, Operand(0xc0000000));
- __ b(ne, &call_stub);
- __ SmiTag(r0, r1); // result in r0.
- } else {
- __ SmiUntag(r0);
- __ mov(r0, Operand(r0, LSR, shift_value));
- __ SmiTag(r0);
- }
- break;
- default:
- UNREACHABLE();
- }
-
- __ bind(&done);
- context()->Plug(r0);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- Smi* value) {
- Label smi_case, done;
-
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfSmi(r0, &smi_case);
-
- // The order of the arguments does not matter for bit-ops with a
- // constant operand.
- __ mov(r1, Operand(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
- __ jmp(&done);
-
- // Smi case.
- __ bind(&smi_case);
- __ mov(r1, Operand(value));
- switch (op) {
- case Token::BIT_OR:
- __ orr(r0, r0, Operand(r1));
- break;
- case Token::BIT_XOR:
- __ eor(r0, r0, Operand(r1));
- break;
- case Token::BIT_AND:
- __ and_(r0, r0, Operand(r1));
- break;
- default:
- UNREACHABLE();
- }
-
- __ bind(&done);
- context()->Plug(r0);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiBinaryOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value) {
- switch (op) {
- case Token::BIT_OR:
- case Token::BIT_XOR:
- case Token::BIT_AND:
- EmitConstantSmiBitOp(expr, op, mode, value);
- break;
- case Token::SHL:
- case Token::SAR:
- case Token::SHR:
- ASSERT(!left_is_constant_smi);
- EmitConstantSmiShiftOp(expr, op, mode, value);
- break;
- case Token::ADD:
- EmitConstantSmiAdd(expr, mode, left_is_constant_smi, value);
- break;
- case Token::SUB:
- EmitConstantSmiSub(expr, mode, left_is_constant_smi, value);
- break;
- default:
- UNREACHABLE();
- }
-}
-
-
void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left_expr,
- Expression* right_expr,
- ConstantOperand constant) {
- if (constant == kRightConstant) {
- Smi* value = Smi::cast(*right_expr->AsLiteral()->handle());
- EmitConstantSmiBinaryOp(expr, op, mode, false, value);
- return;
- } else if (constant == kLeftConstant) {
- Smi* value = Smi::cast(*left_expr->AsLiteral()->handle());
- EmitConstantSmiBinaryOp(expr, op, mode, true, value);
- return;
- }
-
+ Expression* right_expr) {
Label done, smi_case, stub_call;
Register scratch1 = r2;
@@ -2050,7 +1825,9 @@
__ mov(r1, r0);
__ pop(r0); // Restore value.
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
@@ -2071,7 +1848,9 @@
__ pop(r2);
}
__ pop(r0); // Restore value.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
@@ -2095,9 +1874,9 @@
// r2, and the global object in r1.
__ mov(r2, Operand(var->name()));
__ ldr(r1, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(is_strict()
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
@@ -2166,9 +1945,10 @@
case Slot::LOOKUP:
// Call the runtime for the assignment.
__ push(r0); // Value.
- __ mov(r0, Operand(slot->var()->name()));
- __ Push(cp, r0); // Context and name.
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
+ __ mov(r1, Operand(slot->var()->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
}
}
@@ -2203,7 +1983,9 @@
__ pop(r1);
}
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
@@ -2247,7 +2029,9 @@
__ pop(r2);
}
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
@@ -2362,6 +2146,29 @@
}
+void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
+ int arg_count) {
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
+ } else {
+ __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+ }
+ __ push(r1);
+
+ // Push the receiver of the enclosing function and do runtime call.
+ __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
+ __ push(r1);
+ // Push the strict mode flag.
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ __ push(r1);
+
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
+ : Runtime::kResolvePossiblyDirectEval, 4);
+}
+
+
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
// We want to verify that RecordJSReturnSite gets called on all paths
@@ -2391,26 +2198,31 @@
VisitForStackValue(args->at(i));
}
- // Push copy of the function - found below the arguments.
- __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
- __ push(r1);
-
- // Push copy of the first argument or undefined if it doesn't exist.
- if (arg_count > 0) {
- __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
- __ push(r1);
- } else {
- __ push(r2);
+ // If we know that eval can only be shadowed by eval-introduced
+ // variables we attempt to load the global eval function directly
+ // in generated code. If we succeed, there is no need to perform a
+ // context lookup in the runtime system.
+ Label done;
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Label slow;
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow);
+ // Push the function and resolve eval.
+ __ push(r0);
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
+ __ jmp(&done);
+ __ bind(&slow);
}
- // Push the receiver of the enclosing function and do runtime call.
- __ ldr(r1,
- MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
+ // Push copy of the function (found below the arguments) and
+ // resolve eval.
+ __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ push(r1);
- // Push the strict mode flag.
- __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
- __ push(r1);
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
+ if (done.is_linked()) {
+ __ bind(&done);
+ }
// The runtime call returns a pair of values in r0 (function) and
// r1 (receiver). Touch up the stack with the right values.
@@ -3430,9 +3242,235 @@
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
+ Label bailout, done, one_char_separator, long_separator,
+ non_trivial_array, not_size_one_array, loop,
+ empty_separator_loop, one_char_separator_loop,
+ one_char_separator_loop_entry, long_separator_loop;
+
+ ASSERT(args->length() == 2);
+ VisitForStackValue(args->at(1));
+ VisitForAccumulatorValue(args->at(0));
+
+ // All aliases of the same register have disjoint lifetimes.
+ Register array = r0;
+ Register elements = no_reg; // Will be r0.
+ Register result = no_reg; // Will be r0.
+ Register separator = r1;
+ Register array_length = r2;
+ Register result_pos = no_reg; // Will be r2
+ Register string_length = r3;
+ Register string = r4;
+ Register element = r5;
+ Register elements_end = r6;
+ Register scratch1 = r7;
+ Register scratch2 = r9;
+
+ // Separator operand is on the stack.
+ __ pop(separator);
+
+ // Check that the array is a JSArray.
+ __ JumpIfSmi(array, &bailout);
+ __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &bailout);
+
+ // Check that the array has fast elements.
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
+ __ tst(scratch2, Operand(1 << Map::kHasFastElements));
+ __ b(eq, &bailout);
+
+ // If the array has length zero, return the empty string.
+ __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
+ __ SmiUntag(array_length, SetCC);
+ __ b(ne, &non_trivial_array);
+ __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
+ __ b(&done);
+
+ __ bind(&non_trivial_array);
+
+ // Get the FixedArray containing array's elements.
+ elements = array;
+ __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
+ array = no_reg; // End of array's live range.
+
+ // Check that all array elements are sequential ASCII strings, and
+ // accumulate the sum of their lengths, as a smi-encoded value.
+ __ mov(string_length, Operand(0));
+ __ add(element,
+ elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+ // Loop condition: while (element < elements_end).
+ // Live values in registers:
+ // elements: Fixed array of strings.
+ // array_length: Length of the fixed array of strings (not smi)
+ // separator: Separator string
+ // string_length: Accumulated sum of string lengths (smi).
+ // element: Current array element.
+ // elements_end: Array end.
+ if (FLAG_debug_code) {
+ __ cmp(array_length, Operand(0));
+ __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
+ }
+ __ bind(&loop);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ JumpIfSmi(string, &bailout);
+ __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+ __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
+ __ add(string_length, string_length, Operand(scratch1));
+ __ b(vs, &bailout);
+ __ cmp(element, elements_end);
+ __ b(lt, &loop);
+
+ // If array_length is 1, return elements[0], a string.
+ __ cmp(array_length, Operand(1));
+ __ b(ne, ¬_size_one_array);
+ __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
+ __ b(&done);
+
+ __ bind(¬_size_one_array);
+
+ // Live values in registers:
+ // separator: Separator string
+ // array_length: Length of the array.
+ // string_length: Sum of string lengths (smi).
+ // elements: FixedArray of strings.
+
+ // Check that the separator is a flat ASCII string.
+ __ JumpIfSmi(separator, &bailout);
+ __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+
+ // Add (separator length times array_length) - separator length to the
+ // string_length to get the length of the result string. array_length is not
+ // smi but the other values are, so the result is a smi
+ __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ sub(string_length, string_length, Operand(scratch1));
+ __ smull(scratch2, ip, array_length, scratch1);
+ // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
+ // zero.
+ __ cmp(ip, Operand(0));
+ __ b(ne, &bailout);
+ __ tst(scratch2, Operand(0x80000000));
+ __ b(ne, &bailout);
+ __ add(string_length, string_length, Operand(scratch2));
+ __ b(vs, &bailout);
+ __ SmiUntag(string_length);
+
+ // Get first element in the array to free up the elements register to be used
+ // for the result.
+ __ add(element,
+ elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ result = elements; // End of live range for elements.
+ elements = no_reg;
+ // Live values in registers:
+ // element: First array element
+ // separator: Separator string
+ // string_length: Length of result string (not smi)
+ // array_length: Length of the array.
+ __ AllocateAsciiString(result,
+ string_length,
+ scratch1,
+ scratch2,
+ elements_end,
+ &bailout);
+ // Prepare for looping. Set up elements_end to end of the array. Set
+ // result_pos to the position of the result where to write the first
+ // character.
+ __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
+ result_pos = array_length; // End of live range for array_length.
+ array_length = no_reg;
+ __ add(result_pos,
+ result,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+
+ // Check the length of the separator.
+ __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
+ __ cmp(scratch1, Operand(Smi::FromInt(1)));
+ __ b(eq, &one_char_separator);
+ __ b(gt, &long_separator);
+
+ // Empty separator case
+ __ bind(&empty_separator_loop);
+ // Live values in registers:
+ // result_pos: the position to which we are currently copying characters.
+ // element: Current array element.
+ // elements_end: Array end.
+
+ // Copy next array element to the result.
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &empty_separator_loop); // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ // One-character separator case
+ __ bind(&one_char_separator);
+ // Replace separator with its ascii character value.
+ __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
+ // Jump into the loop after the code that copies the separator, so the first
+ // element is not preceded by a separator
+ __ jmp(&one_char_separator_loop_entry);
+
+ __ bind(&one_char_separator_loop);
+ // Live values in registers:
+ // result_pos: the position to which we are currently copying characters.
+ // element: Current array element.
+ // elements_end: Array end.
+ // separator: Single separator ascii char (in lower byte).
+
+ // Copy the separator character to the result.
+ __ strb(separator, MemOperand(result_pos, 1, PostIndex));
+
+ // Copy next array element to the result.
+ __ bind(&one_char_separator_loop_entry);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ // Long separator case (separator is more than one character). Entry is at the
+ // label long_separator below.
+ __ bind(&long_separator_loop);
+ // Live values in registers:
+ // result_pos: the position to which we are currently copying characters.
+ // element: Current array element.
+ // elements_end: Array end.
+ // separator: Separator string.
+
+ // Copy the separator to the result.
+ __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string,
+ separator,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+
+ __ bind(&long_separator);
+ __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
+ __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
+ __ SmiUntag(string_length);
+ __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ cmp(element, elements_end);
+ __ b(lt, &long_separator_loop); // End while (element < elements_end).
+ ASSERT(result.is(r0));
+ __ b(&done);
+
+ __ bind(&bailout);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ bind(&done);
context()->Plug(r0);
- return;
}
@@ -3767,7 +3805,9 @@
case NAMED_PROPERTY: {
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
__ pop(r1);
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
@@ -3782,7 +3822,9 @@
case KEYED_PROPERTY: {
__ pop(r1); // Key.
__ pop(r2); // Receiver.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 6c7aa06..0fc6818 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -1400,7 +1400,8 @@
}
-void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ---------- S t a t e --------------
// -- r0 : value
// -- r1 : key
@@ -1411,11 +1412,16 @@
// Push receiver, key and value for runtime call.
__ Push(r2, r1, r0);
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
+ __ mov(r0, Operand(Smi::FromInt(strict_mode))); // Strict mode.
+ __ Push(r1, r0);
+
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
-void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ---------- S t a t e --------------
// -- r0 : value
// -- r1 : key
@@ -1470,7 +1476,7 @@
// r0: value.
// r1: key.
// r2: receiver.
- GenerateRuntimeSetProperty(masm);
+ GenerateRuntimeSetProperty(masm, strict_mode);
// Check whether the elements is a pixel array.
// r4: elements map.
@@ -1540,7 +1546,7 @@
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : receiver
@@ -1552,7 +1558,7 @@
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
NOT_IN_LOOP,
MONOMORPHIC,
- extra_ic_state);
+ strict_mode);
StubCache::GenerateProbe(masm, flags, r1, r2, r3, r4, r5);
// Cache miss: Jump to runtime.
@@ -1646,7 +1652,8 @@
}
-void StoreIC::GenerateGlobalProxy(MacroAssembler* masm) {
+void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : receiver
@@ -1656,8 +1663,12 @@
__ Push(r1, r2, r0);
+ __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
+ __ mov(r0, Operand(Smi::FromInt(strict_mode)));
+ __ Push(r1, r0);
+
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index d3c9fee..54ed4ba 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -1154,8 +1154,7 @@
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new LInstanceOfKnownGlobal(UseFixed(instr->value(), r0), FixedTemp(r4));
- MarkAsSaveDoubles(result);
- return AssignEnvironment(AssignPointerMap(DefineFixed(result, r0)));
+ return MarkAsCall(DefineFixed(result, r0), instr);
}
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index d375617..c5e9271 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -573,7 +573,8 @@
Handle<DeoptimizationInputData> data =
Factory::NewDeoptimizationInputData(length, TENURED);
- data->SetTranslationByteArray(*translations_.CreateByteArray());
+ Handle<ByteArray> translations = translations_.CreateByteArray();
+ data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
Handle<FixedArray> literals =
@@ -1985,11 +1986,7 @@
__ BlockConstPoolFor(kAdditionalDelta);
__ mov(temp, Operand(delta * kPointerSize));
__ StoreToSafepointRegisterSlot(temp, temp);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
- ASSERT_EQ(kAdditionalDelta,
- masm_->InstructionsGeneratedSince(&before_push_delta));
- RecordSafepointWithRegisters(
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// Put the result value into the result register slot and
// restore all registers.
__ StoreToSafepointRegisterSlot(result, result);
@@ -2586,41 +2583,6 @@
}
-// Truncates a double using a specific rounding mode.
-// Clears the z flag (ne condition) if an overflow occurs.
-void LCodeGen::EmitVFPTruncate(VFPRoundingMode rounding_mode,
- SwVfpRegister result,
- DwVfpRegister double_input,
- Register scratch1,
- Register scratch2) {
- Register prev_fpscr = scratch1;
- Register scratch = scratch2;
-
- // Set custom FPCSR:
- // - Set rounding mode.
- // - Clear vfp cumulative exception flags.
- // - Make sure Flush-to-zero mode control bit is unset.
- __ vmrs(prev_fpscr);
- __ bic(scratch, prev_fpscr, Operand(kVFPExceptionMask |
- kVFPRoundingModeMask |
- kVFPFlushToZeroMask));
- __ orr(scratch, scratch, Operand(rounding_mode));
- __ vmsr(scratch);
-
- // Convert the argument to an integer.
- __ vcvt_s32_f64(result,
- double_input,
- kFPSCRRounding);
-
- // Retrieve FPSCR.
- __ vmrs(scratch);
- // Restore FPSCR.
- __ vmsr(prev_fpscr);
- // Check for vfp exceptions.
- __ tst(scratch, Operand(kVFPExceptionMask));
-}
-
-
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
@@ -2628,11 +2590,11 @@
Register scratch1 = scratch0();
Register scratch2 = ToRegister(instr->TempAt(0));
- EmitVFPTruncate(kRoundToMinusInf,
- single_scratch,
- input,
- scratch1,
- scratch2);
+ __ EmitVFPTruncate(kRoundToMinusInf,
+ single_scratch,
+ input,
+ scratch1,
+ scratch2);
DeoptimizeIf(ne, instr->environment());
// Move the result back to general purpose register r0.
@@ -2654,11 +2616,11 @@
Register result = ToRegister(instr->result());
Register scratch1 = scratch0();
Register scratch2 = result;
- EmitVFPTruncate(kRoundToNearest,
- double_scratch0().low(),
- input,
- scratch1,
- scratch2);
+ __ EmitVFPTruncate(kRoundToNearest,
+ double_scratch0().low(),
+ input,
+ scratch1,
+ scratch2);
DeoptimizeIf(ne, instr->environment());
__ vmov(result, double_scratch0().low());
@@ -2863,9 +2825,9 @@
// Name is always in r2.
__ mov(r2, Operand(instr->name()));
- Handle<Code> ic(Builtins::builtin(info_->is_strict()
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
@@ -2907,7 +2869,9 @@
ASSERT(ToRegister(instr->key()).is(r1));
ASSERT(ToRegister(instr->value()).is(r0));
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
@@ -3371,11 +3335,12 @@
Register scratch1 = scratch0();
Register scratch2 = ToRegister(instr->TempAt(0));
- EmitVFPTruncate(kRoundToZero,
- single_scratch,
- double_input,
- scratch1,
- scratch2);
+ __ EmitVFPTruncate(kRoundToZero,
+ single_scratch,
+ double_input,
+ scratch1,
+ scratch2);
+
// Deoptimize if we had a vfp invalid exception.
DeoptimizeIf(ne, instr->environment());
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 2d9c6ed..a26f631 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -206,11 +206,6 @@
// Specific math operations - used from DoUnaryMathOperation.
void EmitIntegerMathAbs(LUnaryMathOperation* instr);
void DoMathAbs(LUnaryMathOperation* instr);
- void EmitVFPTruncate(VFPRoundingMode rounding_mode,
- SwVfpRegister result,
- DwVfpRegister double_input,
- Register scratch1,
- Register scratch2);
void DoMathFloor(LUnaryMathOperation* instr);
void DoMathRound(LUnaryMathOperation* instr);
void DoMathSqrt(LUnaryMathOperation* instr);
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index e0f2916..9340b61 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -271,6 +271,29 @@
}
+void MacroAssembler::Bfi(Register dst,
+ Register src,
+ Register scratch,
+ int lsb,
+ int width,
+ Condition cond) {
+ ASSERT(0 <= lsb && lsb < 32);
+ ASSERT(0 <= width && width < 32);
+ ASSERT(lsb + width < 32);
+ ASSERT(!scratch.is(dst));
+ if (width == 0) return;
+ if (!CpuFeatures::IsSupported(ARMv7)) {
+ int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
+ bic(dst, dst, Operand(mask));
+ and_(scratch, src, Operand((1 << width) - 1));
+ mov(scratch, Operand(scratch, LSL, lsb));
+ orr(dst, dst, scratch);
+ } else {
+ bfi(dst, src, lsb, width, cond);
+ }
+}
+
+
void MacroAssembler::Bfc(Register dst, int lsb, int width, Condition cond) {
ASSERT(lsb < 32);
if (!CpuFeatures::IsSupported(ARMv7)) {
@@ -1818,9 +1841,9 @@
ldr(scratch, FieldMemOperand(source, HeapNumber::kExponentOffset));
// Get exponent alone in scratch2.
Ubfx(scratch2,
- scratch,
- HeapNumber::kExponentShift,
- HeapNumber::kExponentBits);
+ scratch,
+ HeapNumber::kExponentShift,
+ HeapNumber::kExponentBits);
// Load dest with zero. We use this either for the final shift or
// for the answer.
mov(dest, Operand(0, RelocInfo::NONE));
@@ -1883,6 +1906,52 @@
}
+void MacroAssembler::EmitVFPTruncate(VFPRoundingMode rounding_mode,
+ SwVfpRegister result,
+ DwVfpRegister double_input,
+ Register scratch1,
+ Register scratch2,
+ CheckForInexactConversion check_inexact) {
+ ASSERT(CpuFeatures::IsSupported(VFP3));
+ CpuFeatures::Scope scope(VFP3);
+ Register prev_fpscr = scratch1;
+ Register scratch = scratch2;
+
+ int32_t check_inexact_conversion =
+ (check_inexact == kCheckForInexactConversion) ? kVFPInexactExceptionBit : 0;
+
+ // Set custom FPCSR:
+ // - Set rounding mode.
+ // - Clear vfp cumulative exception flags.
+ // - Make sure Flush-to-zero mode control bit is unset.
+ vmrs(prev_fpscr);
+ bic(scratch,
+ prev_fpscr,
+ Operand(kVFPExceptionMask |
+ check_inexact_conversion |
+ kVFPRoundingModeMask |
+ kVFPFlushToZeroMask));
+ // 'Round To Nearest' is encoded by 0b00 so no bits need to be set.
+ if (rounding_mode != kRoundToNearest) {
+ orr(scratch, scratch, Operand(rounding_mode));
+ }
+ vmsr(scratch);
+
+ // Convert the argument to an integer.
+ vcvt_s32_f64(result,
+ double_input,
+ (rounding_mode == kRoundToZero) ? kDefaultRoundToZero
+ : kFPSCRRounding);
+
+ // Retrieve FPSCR.
+ vmrs(scratch);
+ // Restore FPSCR.
+ vmsr(prev_fpscr);
+ // Check for vfp exceptions.
+ tst(scratch, Operand(kVFPExceptionMask | check_inexact_conversion));
+}
+
+
void MacroAssembler::GetLeastBitsFromSmi(Register dst,
Register src,
int num_least_bits) {
@@ -2389,6 +2458,60 @@
}
+void MacroAssembler::CopyBytes(Register src,
+ Register dst,
+ Register length,
+ Register scratch) {
+ Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done;
+
+ // Align src before copying in word size chunks.
+ bind(&align_loop);
+ cmp(length, Operand(0));
+ b(eq, &done);
+ bind(&align_loop_1);
+ tst(src, Operand(kPointerSize - 1));
+ b(eq, &word_loop);
+ ldrb(scratch, MemOperand(src, 1, PostIndex));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ sub(length, length, Operand(1), SetCC);
+ b(ne, &byte_loop_1);
+
+ // Copy bytes in word size chunks.
+ bind(&word_loop);
+ if (FLAG_debug_code) {
+ tst(src, Operand(kPointerSize - 1));
+ Assert(eq, "Expecting alignment for CopyBytes");
+ }
+ cmp(length, Operand(kPointerSize));
+ b(lt, &byte_loop);
+ ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
+#if CAN_USE_UNALIGNED_ACCESSES
+ str(scratch, MemOperand(dst, kPointerSize, PostIndex));
+#else
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ mov(scratch, Operand(scratch, LSR, 8));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+#endif
+ sub(length, length, Operand(kPointerSize));
+ b(&word_loop);
+
+ // Copy the last bytes if any left.
+ bind(&byte_loop);
+ cmp(length, Operand(0));
+ b(eq, &done);
+ bind(&byte_loop_1);
+ ldrb(scratch, MemOperand(src, 1, PostIndex));
+ strb(scratch, MemOperand(dst, 1, PostIndex));
+ sub(length, length, Operand(1), SetCC);
+ b(ne, &byte_loop_1);
+ bind(&done);
+}
+
+
void MacroAssembler::CountLeadingZeros(Register zeros, // Answer.
Register source, // Input.
Register scratch) {
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 3e13c78..acd1d79 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -121,6 +121,15 @@
Condition cond = al);
void Sbfx(Register dst, Register src, int lsb, int width,
Condition cond = al);
+ // The scratch register is not used for ARMv7.
+ // scratch can be the same register as src (in which case it is trashed), but
+ // not the same as dst.
+ void Bfi(Register dst,
+ Register src,
+ Register scratch,
+ int lsb,
+ int width,
+ Condition cond = al);
void Bfc(Register dst, int lsb, int width, Condition cond = al);
void Usat(Register dst, int satpos, const Operand& src,
Condition cond = al);
@@ -234,6 +243,17 @@
}
}
+ // Pop two registers. Pops rightmost register first (from lower address).
+ void Pop(Register src1, Register src2, Condition cond = al) {
+ ASSERT(!src1.is(src2));
+ if (src1.code() > src2.code()) {
+ ldm(ia_w, sp, src1.bit() | src2.bit(), cond);
+ } else {
+ ldr(src2, MemOperand(sp, 4, PostIndex), cond);
+ ldr(src1, MemOperand(sp, 4, PostIndex), cond);
+ }
+ }
+
// Push and pop the registers that can hold pointers, as defined by the
// RegList constant kSafepointSavedRegisters.
void PushSafepointRegisters();
@@ -497,6 +517,14 @@
// Copies a fixed number of fields of heap objects from src to dst.
void CopyFields(Register dst, Register src, RegList temps, int field_count);
+ // Copies a number of bytes from src to dst. All registers are clobbered. On
+ // exit src and dst will point to the place just after where the last byte was
+ // read or written and length will be zero.
+ void CopyBytes(Register src,
+ Register dst,
+ Register length,
+ Register scratch);
+
// ---------------------------------------------------------------------------
// Support functions.
@@ -613,6 +641,19 @@
DwVfpRegister double_scratch,
Label *not_int32);
+  // Truncates a double using a specific rounding mode.
+  // Clears the z flag (ne condition) if an overflow occurs.
+  // If check_inexact is kCheckForInexactConversion, the z flag is also
+  // cleared if the conversion was inexact, i.e. if the double value
+  // could not be converted exactly to a 32-bit integer.
+ void EmitVFPTruncate(VFPRoundingMode rounding_mode,
+ SwVfpRegister result,
+ DwVfpRegister double_input,
+ Register scratch1,
+ Register scratch2,
+ CheckForInexactConversion check
+ = kDontCheckForInexactConversion);
+
// Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz
// instruction. On pre-ARM5 hardware this routine gives the wrong answer
// for 0 (31 instead of 32). Source and scratch can be the same in which case
@@ -777,11 +818,11 @@
mov(reg, scratch);
}
- void SmiUntag(Register reg) {
- mov(reg, Operand(reg, ASR, kSmiTagSize));
+ void SmiUntag(Register reg, SBit s = LeaveCC) {
+ mov(reg, Operand(reg, ASR, kSmiTagSize), s);
}
- void SmiUntag(Register dst, Register src) {
- mov(dst, Operand(src, ASR, kSmiTagSize));
+ void SmiUntag(Register dst, Register src, SBit s = LeaveCC) {
+ mov(dst, Operand(src, ASR, kSmiTagSize), s);
}
// Jump the register contains a smi.
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index 20d51c6..f475a18 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -1005,7 +1005,9 @@
intptr_t* ptr = reinterpret_cast<intptr_t*>(addr);
return *ptr;
}
- PrintF("Unaligned read at 0x%08x, pc=%p\n", addr, instr);
+ PrintF("Unaligned read at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
UNIMPLEMENTED();
return 0;
#endif
@@ -1023,7 +1025,9 @@
*ptr = value;
return;
}
- PrintF("Unaligned write at 0x%08x, pc=%p\n", addr, instr);
+ PrintF("Unaligned write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
UNIMPLEMENTED();
#endif
}
@@ -1038,7 +1042,9 @@
uint16_t* ptr = reinterpret_cast<uint16_t*>(addr);
return *ptr;
}
- PrintF("Unaligned unsigned halfword read at 0x%08x, pc=%p\n", addr, instr);
+ PrintF("Unaligned unsigned halfword read at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
UNIMPLEMENTED();
return 0;
#endif
@@ -1072,7 +1078,9 @@
*ptr = value;
return;
}
- PrintF("Unaligned unsigned halfword write at 0x%08x, pc=%p\n", addr, instr);
+ PrintF("Unaligned unsigned halfword write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
UNIMPLEMENTED();
#endif
}
@@ -1089,7 +1097,9 @@
*ptr = value;
return;
}
- PrintF("Unaligned halfword write at 0x%08x, pc=%p\n", addr, instr);
+ PrintF("Unaligned halfword write at 0x%08x, pc=0x%08" V8PRIxPTR "\n",
+ addr,
+ reinterpret_cast<intptr_t>(instr));
UNIMPLEMENTED();
#endif
}
@@ -2554,6 +2564,7 @@
double dn_value = get_double_from_d_register(vn);
double dm_value = get_double_from_d_register(vm);
double dd_value = dn_value / dm_value;
+ div_zero_vfp_flag_ = (dm_value == 0);
set_d_register_from_double(vd, dd_value);
} else {
UNIMPLEMENTED(); // Not used by V8.
@@ -2788,14 +2799,17 @@
inv_op_vfp_flag_ = get_inv_op_vfp_flag(mode, val, unsigned_integer);
+ double abs_diff =
+ unsigned_integer ? fabs(val - static_cast<uint32_t>(temp))
+ : fabs(val - temp);
+
+ inexact_vfp_flag_ = (abs_diff != 0);
+
if (inv_op_vfp_flag_) {
temp = VFPConversionSaturate(val, unsigned_integer);
} else {
switch (mode) {
case RN: {
- double abs_diff =
- unsigned_integer ? fabs(val - static_cast<uint32_t>(temp))
- : fabs(val - temp);
int val_sign = (val > 0) ? 1 : -1;
if (abs_diff > 0.5) {
temp += val_sign;
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index e250112..60a11f3 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -2671,10 +2671,13 @@
__ Push(r1, r2, r0); // Receiver, name, value.
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
+ __ push(r0); // strict mode
+
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
- __ TailCallExternalReference(store_ic_property, 3, 1);
+ __ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -4056,7 +4059,12 @@
// Push receiver, key and value for runtime call.
__ Push(r2, r1, r0);
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
+ __ mov(r0, Operand(Smi::FromInt(
+ Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
+ __ Push(r1, r0);
+
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
return GetCode(flags);
}
diff --git a/src/arm/virtual-frame-arm.cc b/src/arm/virtual-frame-arm.cc
index b4b518c..544e405 100644
--- a/src/arm/virtual-frame-arm.cc
+++ b/src/arm/virtual-frame-arm.cc
@@ -332,9 +332,9 @@
void VirtualFrame::CallStoreIC(Handle<String> name,
bool is_contextual,
StrictModeFlag strict_mode) {
- Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
PopToR0();
RelocInfo::Mode mode;
if (is_contextual) {
@@ -359,8 +359,10 @@
}
-void VirtualFrame::CallKeyedStoreIC() {
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+void VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
PopToR1R0();
SpillAll();
EmitPop(r2);
diff --git a/src/arm/virtual-frame-arm.h b/src/arm/virtual-frame-arm.h
index b6e794a..76470bd 100644
--- a/src/arm/virtual-frame-arm.h
+++ b/src/arm/virtual-frame-arm.h
@@ -303,7 +303,7 @@
// Call keyed store IC. Value, key and receiver are on the stack. All three
// are consumed. Result is returned in r0.
- void CallKeyedStoreIC();
+ void CallKeyedStoreIC(StrictModeFlag strict_mode);
// Call into an IC stub given the number of arguments it removes
// from the stack. Register arguments to the IC stub are implicit,
diff --git a/src/builtins.cc b/src/builtins.cc
index 8fdc1b1..01e8deb 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1328,12 +1328,12 @@
static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
- StoreIC::GenerateMegamorphic(masm, StoreIC::kStoreICNonStrict);
+ StoreIC::GenerateMegamorphic(masm, kNonStrictMode);
}
static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
- StoreIC::GenerateMegamorphic(masm, StoreIC::kStoreICStrict);
+ StoreIC::GenerateMegamorphic(masm, kStrictMode);
}
@@ -1348,17 +1348,22 @@
static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
- StoreIC::GenerateGlobalProxy(masm);
+ StoreIC::GenerateGlobalProxy(masm, kNonStrictMode);
}
static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
- StoreIC::GenerateGlobalProxy(masm);
+ StoreIC::GenerateGlobalProxy(masm, kStrictMode);
}
static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
- KeyedStoreIC::GenerateGeneric(masm);
+ KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
+}
+
+
+static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
+ KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
}
@@ -1372,6 +1377,11 @@
}
+static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
+ KeyedStoreIC::GenerateInitialize(masm);
+}
+
+
#ifdef ENABLE_DEBUGGER_SUPPORT
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
Debug::GenerateLoadICDebugBreak(masm);
diff --git a/src/builtins.h b/src/builtins.h
index 2733410..5ea4665 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -62,111 +62,116 @@
// Define list of builtins implemented in assembly.
-#define BUILTIN_LIST_A(V) \
- V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(JSConstructCall, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(LazyCompile, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(LazyRecompile, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(NotifyOSR, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- \
- V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- \
- V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \
- Code::kNoExtraICState) \
- V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- \
- V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \
- Code::kNoExtraICState) \
- V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- \
- V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \
- Code::kNoExtraICState) \
- V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \
- StoreIC::kStoreICStrict) \
- V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \
- StoreIC::kStoreICStrict) \
- V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \
- StoreIC::kStoreICStrict) \
- V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \
- StoreIC::kStoreICStrict) \
- V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \
- StoreIC::kStoreICStrict) \
- \
- V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \
- Code::kNoExtraICState) \
- \
- /* Uses KeyedLoadIC_Initialize; must be after in list. */ \
- V(FunctionCall, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(FunctionApply, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- \
- V(ArrayCode, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- \
- V(StringConstructCode, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
- \
- V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState)
+#define BUILTIN_LIST_A(V) \
+ V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructCall, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(LazyCompile, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(LazyRecompile, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(NotifyOSR, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ \
+ V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ \
+ V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ \
+ V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ \
+ V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \
+ kStrictMode) \
+ V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \
+ kStrictMode) \
+ V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \
+ kStrictMode) \
+ V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \
+ kStrictMode) \
+ V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \
+ kStrictMode) \
+ \
+ V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \
+ Code::kNoExtraICState) \
+ \
+ V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \
+ kStrictMode) \
+ V(KeyedStoreIC_Generic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \
+ kStrictMode) \
+ \
+ /* Uses KeyedLoadIC_Initialize; must be after in list. */ \
+ V(FunctionCall, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(FunctionApply, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ \
+ V(ArrayCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ \
+ V(StringConstructCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ \
+ V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState)
#ifdef ENABLE_DEBUGGER_SUPPORT
diff --git a/src/compiler.cc b/src/compiler.cc
index f392cce..367de64 100755
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -221,11 +221,12 @@
// or perform on-stack replacement for function with too many
// stack-allocated local variables.
//
- // The encoding is as a signed value, with parameters using the negative
- // indices and locals the non-negative ones.
+ // The encoding is as a signed value, with parameters and receiver using
+ // the negative indices and locals the non-negative ones.
const int limit = LUnallocated::kMaxFixedIndices / 2;
Scope* scope = info->scope();
- if (scope->num_parameters() > limit || scope->num_stack_slots() > limit) {
+ if ((scope->num_parameters() + 1) > limit ||
+ scope->num_stack_slots() > limit) {
AbortAndDisable(info);
// True indicates the compilation pipeline is still going, not
// necessarily that we optimized the code.
diff --git a/src/d8.cc b/src/d8.cc
index 4dcc794..349ec90 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -405,7 +405,7 @@
void Shell::Initialize() {
Shell::counter_map_ = new CounterMap();
// Set up counters
- if (i::FLAG_map_counters != NULL)
+ if (i::StrLength(i::FLAG_map_counters) != 0)
MapCounters(i::FLAG_map_counters);
if (i::FLAG_dump_counters) {
V8::SetCounterFunction(LookupCounter);
@@ -425,6 +425,12 @@
global_template->Set(String::New("quit"), FunctionTemplate::New(Quit));
global_template->Set(String::New("version"), FunctionTemplate::New(Version));
+#ifdef LIVE_OBJECT_LIST
+ global_template->Set(String::New("lol_is_enabled"), Boolean::New(true));
+#else
+ global_template->Set(String::New("lol_is_enabled"), Boolean::New(false));
+#endif
+
Handle<ObjectTemplate> os_templ = ObjectTemplate::New();
AddOSMethods(os_templ);
global_template->Set(String::New("os"), os_templ);
diff --git a/src/d8.js b/src/d8.js
index b0edb70..9798078 100644
--- a/src/d8.js
+++ b/src/d8.js
@@ -117,6 +117,10 @@
var trace_compile = false; // Tracing all compile events?
var trace_debug_json = false; // Tracing all debug json packets?
var last_cmd_line = '';
+//var lol_is_enabled; // Set to true in d8.cc if LIVE_OBJECT_LIST is defined.
+var lol_next_dump_index = 0;
+const kDefaultLolLinesToPrintAtATime = 10;
+const kMaxLolLinesToPrintAtATime = 1000;
var repeat_cmd_line = '';
var is_running = true;
@@ -495,6 +499,13 @@
this.request_ = void 0;
break;
+ case 'liveobjectlist':
+ case 'lol':
+ if (lol_is_enabled) {
+ this.request_ = this.lolToJSONRequest_(args, is_repeating);
+ break;
+ }
+
default:
throw new Error('Unknown command "' + cmd + '"');
}
@@ -539,10 +550,54 @@
};
+// Note: we use detected command repetition as a signal for continuation here.
+DebugRequest.prototype.createLOLRequest = function(command,
+ start_index,
+ lines_to_dump,
+ is_continuation) {
+ if (is_continuation) {
+ start_index = lol_next_dump_index;
+ }
+
+ if (lines_to_dump) {
+ lines_to_dump = parseInt(lines_to_dump);
+ } else {
+ lines_to_dump = kDefaultLolLinesToPrintAtATime;
+ }
+ if (lines_to_dump > kMaxLolLinesToPrintAtATime) {
+ lines_to_dump = kMaxLolLinesToPrintAtATime;
+ }
+
+ // Save the next start_index to dump from:
+ lol_next_dump_index = start_index + lines_to_dump;
+
+ var request = this.createRequest(command);
+ request.arguments = {};
+ request.arguments.start = start_index;
+ request.arguments.count = lines_to_dump;
+
+ return request;
+};
+
+
// Create a JSON request for the evaluation command.
DebugRequest.prototype.makeEvaluateJSONRequest_ = function(expression) {
// Global varaible used to store whether a handle was requested.
lookup_handle = null;
+
+ if (lol_is_enabled) {
+ // Check if the expression is a obj id in the form @<obj id>.
+ var obj_id_match = expression.match(/^@([0-9]+)$/);
+ if (obj_id_match) {
+ var obj_id = parseInt(obj_id_match[1]);
+ // Build a dump request.
+ var request = this.createRequest('getobj');
+ request.arguments = {};
+ request.arguments.obj_id = obj_id;
+ return request.toJSONProtocol();
+ }
+ }
+
// Check if the expression is a handle id in the form #<handle>#.
var handle_match = expression.match(/^#([0-9]*)#$/);
if (handle_match) {
@@ -1103,6 +1158,10 @@
// Build a evaluate request from the text command.
request = this.createRequest('frame');
last_cmd = 'info args';
+ } else if (lol_is_enabled &&
+ args && (args == 'liveobjectlist' || args == 'lol')) {
+ // Build an evaluate request from the text command.
+ return this.liveObjectListToJSONRequest_(null);
} else {
throw new Error('Invalid info arguments.');
}
@@ -1153,6 +1212,262 @@
};
+// Args: [v[erbose]] [<N>] [i[ndex] <i>] [t[ype] <type>] [sp[ace] <space>]
+DebugRequest.prototype.lolMakeListRequest =
+ function(cmd, args, first_arg_index, is_repeating) {
+
+ var request;
+ var start_index = 0;
+ var dump_limit = void 0;
+ var type_filter = void 0;
+ var space_filter = void 0;
+ var prop_filter = void 0;
+ var is_verbose = false;
+ var i;
+
+ for (i = first_arg_index; i < args.length; i++) {
+ var arg = args[i];
+ // Check for [v[erbose]]:
+ if (arg === 'verbose' || arg === 'v') {
+ // Nothing to do. This is already implied by args.length > 3.
+ is_verbose = true;
+
+ // Check for [<N>]:
+ } else if (arg.match(/^[0-9]+$/)) {
+ dump_limit = arg;
+ is_verbose = true;
+
+ // Check for i[ndex] <i>:
+ } else if (arg === 'index' || arg === 'i') {
+ i++;
+ if (args.length < i) {
+ throw new Error('Missing index after ' + arg + '.');
+ }
+ start_index = parseInt(args[i]);
+ // The user input start index starts at 1:
+ if (start_index <= 0) {
+ throw new Error('Invalid index ' + args[i] + '.');
+ }
+ start_index -= 1;
+ is_verbose = true;
+
+ // Check for t[ype] <type>:
+ } else if (arg === 'type' || arg === 't') {
+ i++;
+ if (args.length < i) {
+ throw new Error('Missing type after ' + arg + '.');
+ }
+ type_filter = args[i];
+
+ // Check for space <heap space name>:
+ } else if (arg === 'space' || arg === 'sp') {
+ i++;
+ if (args.length < i) {
+ throw new Error('Missing space name after ' + arg + '.');
+ }
+ space_filter = args[i];
+
+ // Check for property <prop name>:
+ } else if (arg === 'property' || arg === 'prop') {
+ i++;
+ if (args.length < i) {
+ throw new Error('Missing property name after ' + arg + '.');
+ }
+ prop_filter = args[i];
+
+ } else {
+ throw new Error('Unknown args at ' + arg + '.');
+ }
+ }
+
+ // Build the verbose request:
+ if (is_verbose) {
+ request = this.createLOLRequest('lol-'+cmd,
+ start_index,
+ dump_limit,
+ is_repeating);
+ request.arguments.verbose = true;
+ } else {
+ request = this.createRequest('lol-'+cmd);
+ request.arguments = {};
+ }
+
+ request.arguments.filter = {};
+ if (type_filter) {
+ request.arguments.filter.type = type_filter;
+ }
+ if (space_filter) {
+ request.arguments.filter.space = space_filter;
+ }
+ if (prop_filter) {
+ request.arguments.filter.prop = prop_filter;
+ }
+
+ return request;
+}
+
+
+function extractObjId(args) {
+ var id = args;
+ id = id.match(/^@([0-9]+)$/);
+ if (id) {
+ id = id[1];
+ } else {
+ throw new Error('Invalid obj id ' + args + '.');
+ }
+ return parseInt(id);
+}
+
+
+DebugRequest.prototype.lolToJSONRequest_ = function(args, is_repeating) {
+ var request;
+ // Use default command if one is not specified:
+ if (!args) {
+ args = 'info';
+ }
+
+ var orig_args = args;
+ var first_arg_index;
+
+ var arg, i;
+ var args = args.split(/\s+/g);
+ var cmd = args[0];
+ var id;
+
+ // Command: <id> [v[erbose]] ...
+ if (cmd.match(/^[0-9]+$/)) {
+ // Convert to the padded list command:
+ // Command: l[ist] <dummy> <id> [v[erbose]] ...
+
+ // Insert the implicit 'list' in front and process as normal:
+ cmd = 'list';
+ args.unshift(cmd);
+ }
+
+ switch(cmd) {
+ // Command: c[apture]
+ case 'capture':
+ case 'c':
+ request = this.createRequest('lol-capture');
+ break;
+
+ // Command: clear|d[elete] <id>|all
+ case 'clear':
+ case 'delete':
+ case 'del': {
+ if (args.length < 2) {
+ throw new Error('Missing argument after ' + cmd + '.');
+ } else if (args.length > 2) {
+ throw new Error('Too many arguments after ' + cmd + '.');
+ }
+ id = args[1];
+ if (id.match(/^[0-9]+$/)) {
+ // Delete a specific lol record:
+ request = this.createRequest('lol-delete');
+ request.arguments = {};
+ request.arguments.id = parseInt(id);
+ } else if (id === 'all') {
+ // Delete all:
+ request = this.createRequest('lol-reset');
+ } else {
+ throw new Error('Invalid argument after ' + cmd + '.');
+ }
+ break;
+ }
+
+ // Command: diff <id1> <id2> [<dump options>]
+ case 'diff':
+ first_arg_index = 3;
+
+ // Command: list <dummy> <id> [<dump options>]
+ case 'list':
+
+ // Command: ret[ainers] <obj id> [<dump options>]
+ case 'retainers':
+ case 'ret':
+ case 'retaining-paths':
+ case 'rp': {
+ if (cmd === 'ret') cmd = 'retainers';
+ else if (cmd === 'rp') cmd = 'retaining-paths';
+
+ if (!first_arg_index) first_arg_index = 2;
+
+ if (args.length < first_arg_index) {
+ throw new Error('Too few arguments after ' + cmd + '.');
+ }
+
+ var request_cmd = (cmd === 'list') ? 'diff':cmd;
+ request = this.lolMakeListRequest(request_cmd,
+ args,
+ first_arg_index,
+ is_repeating);
+
+ if (cmd === 'diff') {
+ request.arguments.id1 = parseInt(args[1]);
+ request.arguments.id2 = parseInt(args[2]);
+ } else if (cmd == 'list') {
+ request.arguments.id1 = 0;
+ request.arguments.id2 = parseInt(args[1]);
+ } else {
+ request.arguments.id = extractObjId(args[1]);
+ }
+ break;
+ }
+
+ // Command: getid
+ case 'getid': {
+ request = this.createRequest('lol-getid');
+ request.arguments = {};
+ request.arguments.address = args[1];
+ break;
+ }
+
+ // Command: inf[o] [<N>]
+ case 'info':
+ case 'inf': {
+ if (args.length > 2) {
+ throw new Error('Too many arguments after ' + cmd + '.');
+ }
+ // Build the info request:
+ request = this.createLOLRequest('lol-info', 0, args[1], is_repeating);
+ break;
+ }
+
+ // Command: path <obj id 1> <obj id 2>
+ case 'path': {
+ request = this.createRequest('lol-path');
+ request.arguments = {};
+ if (args.length > 2) {
+ request.arguments.id1 = extractObjId(args[1]);
+ request.arguments.id2 = extractObjId(args[2]);
+ } else {
+ request.arguments.id1 = 0;
+ request.arguments.id2 = extractObjId(args[1]);
+ }
+ break;
+ }
+
+ // Command: print
+ case 'print': {
+ request = this.createRequest('lol-print');
+ request.arguments = {};
+ request.arguments.id = extractObjId(args[1]);
+ break;
+ }
+
+ // Command: reset
+ case 'reset': {
+ request = this.createRequest('lol-reset');
+ break;
+ }
+
+ default:
+ throw new Error('Invalid arguments.');
+ }
+ return request.toJSONProtocol();
+};
+
+
// Create a JSON request for the threads command.
DebugRequest.prototype.threadsCommandToJSONRequest_ = function(args) {
// Build a threads request from the text command.
@@ -1239,6 +1554,49 @@
print('');
print('gc - runs the garbage collector');
print('');
+
+ if (lol_is_enabled) {
+ print('liveobjectlist|lol <command> - live object list tracking.');
+ print(' where <command> can be:');
+ print(' c[apture] - captures a LOL list.');
+ print(' clear|del[ete] <id>|all - clears LOL of id <id>.');
+ print(' If \'all\' is unspecified instead, will clear all.');
+ print(' diff <id1> <id2> [<dump options>]');
+ print(' - prints the diff between LOLs id1 and id2.');
+ print(' - also see <dump options> below.');
+ print(' getid <address> - gets the obj id for the specified address if available.');
+ print(' The address must be in hex form prefixed with 0x.');
+ print(' inf[o] [<N>] - lists summary info of all LOL lists.');
+ print(' If N is specified, will print N items at a time.');
+ print(' [l[ist]] <id> [<dump options>]');
+ print(' - prints the listing of objects in LOL id.');
+ print(' - also see <dump options> below.');
+ print(' reset - clears all LOL lists.');
+ print(' ret[ainers] <id> [<dump options>]');
+ print(' - prints the list of retainers of obj id.');
+ print(' - also see <dump options> below.');
+ print(' path <id1> <id2> - prints the retaining path from obj id1 to id2.');
+ print(' If only one id is specified, will print the path from');
+ print(' roots to the specified object if available.');
+ print(' print <id> - prints the obj for the specified obj id if available.');
+ print('');
+ print(' <dump options> includes:');
+ print(' [v[erbose]] - do verbose dump.');
+ print(' [<N>] - dump N items at a time. Implies verbose dump.');
+ print(' If unspecified, N will default to '+
+ kDefaultLolLinesToPrintAtATime+'. Max N is '+
+ kMaxLolLinesToPrintAtATime+'.');
+ print(' [i[ndex] <i>] - start dump from index i. Implies verbose dump.');
+ print(' [t[ype] <type>] - filter by type.');
+ print(' [sp[ace] <space name>] - filter by heap space where <space name> is one of');
+ print(' { cell, code, lo, map, new, old-data, old-pointer }.');
+ print('');
+ print(' If the verbose option, or an option that implies a verbose dump');
+ print(' is specified, then a verbose dump will be requested. Else, a summary');
+ print(' will be requested.');
+ print('');
+ }
+
print('trace compile');
// hidden command: trace debug json - toggles tracing of debug json packets
print('');
@@ -1339,6 +1697,237 @@
}
+// Formats the body of a 'lol-capture' debug response into a one-line
+// human-readable summary (list id, object count, and total size).
+function decodeLolCaptureResponse(body) {
+  var result;
+  result = 'Captured live object list '+ body.id +
+           ': count '+ body.count + ' size ' + body.size;
+  return result;
+}
+
+
+// Formats the body of a 'lol-delete' debug response: confirms which
+// live object list id was deleted.
+function decodeLolDeleteResponse(body) {
+  var result;
+  result = 'Deleted live object list '+ body.id;
+  return result;
+}
+
+
+// Returns the number of decimal digits needed to print |value|.
+// A value of 0 is treated as 1 so it still counts as one digit.
+function digitsIn(value) {
+  var digits = 0;
+  if (value === 0) value = 1;
+  // Divide down by 10 until the value drops below 1, counting steps.
+  while (value >= 1) {
+    digits++;
+    value /= 10;
+  }
+  return digits;
+}
+
+
+// Returns a string of spaces that pads |value| out to |max_digits|
+// columns, used to right-align numbers in the LOL dump tables.
+function padding(value, max_digits) {
+  var padding_digits = max_digits - digitsIn(value);
+  var padding = '';
+  while (padding_digits > 0) {
+    padding += ' ';
+    padding_digits--;
+  }
+  return padding;
+}
+
+
+// Formats the body of a 'lol-info' debug response: a paged table of all
+// captured live object lists, one row per list with its id, object count,
+// and size.  Deltas (+N) against the previous row are shown when the
+// previous row's count/size was non-zero.  When more pages remain, a
+// "press <enter> for more" hint is appended; otherwise the global
+// repeat_cmd_line is cleared so <enter> stops repeating the command.
+function decodeLolInfoResponse(body) {
+  var result;
+  var lists = body.lists;
+  var length = lists.length;
+  // body.first_index is 0-based; display indices are 1-based.
+  var first_index = body.first_index + 1;
+  var has_more = ((first_index + length) <= body.count);
+  result = 'captured live object lists';
+  if (has_more || (first_index != 1)) {
+    result += ' ['+ length +' of '+ body.count +
+              ': starting from '+ first_index +']';
+  }
+  result += ':\n';
+  var max_digits = digitsIn(body.count);
+  var last_count = 0;
+  var last_size = 0;
+  for (var i = 0; i < length; i++) {
+    var entry = lists[i];
+    var count = entry.count;
+    var size = entry.size;
+    var index = first_index + i;
+    result += '  [' + padding(index, max_digits) + index + '] id '+ entry.id +
+              ': count '+ count;
+    if (last_count > 0) {
+      result += '(+' + (count - last_count) + ')';
+    }
+    result += '  size '+ size;
+    if (last_size > 0) {
+      result += '(+' + (size - last_size) + ')';
+    }
+    result += '\n';
+    last_count = count;
+    last_size = size;
+  }
+  result += '  total: '+length+' lists\n';
+  if (has_more) {
+    result += '  -- press <enter> for more --\n';
+  } else {
+    repeat_cmd_line = '';
+  }
+  if (length === 0) result += '  none\n';
+
+  return result;
+}
+
+
+// Formats a LOL listing response (shared by 'lol-diff' and
+// 'lol-retainers') under the given |title|.  Two modes, keyed off
+// body.summary:
+//   - summary mode: one row per object type with aggregate count/size,
+//     plus synthetic <root>/<weak root> rows when body.found_root /
+//     body.found_weak_root are set;
+//   - full-dump mode: a paged element-by-element listing (body.elements),
+//     with a "press <enter> for more" hint while pages remain.
+// Returns the formatted multi-line string.
+function decodeLolListResponse(body, title) {
+
+  var result;
+  var total_count = body.count;
+  var total_size = body.size;
+  var length;
+  var max_digits;
+  var i;
+  var entry;
+  var index;
+
+  var max_count_digits = digitsIn(total_count);
+  var max_size_digits;
+
+  var summary = body.summary;
+  if (summary) {
+
+    var roots_count = 0;
+    // found_root/found_weak_root may be absent; default them to 0.
+    var found_root = body.found_root || 0;
+    var found_weak_root = body.found_weak_root || 0;
+
+    // Print the summary result:
+    result = 'summary of objects:\n';
+    length = summary.length;
+    if (found_root !== 0) {
+      roots_count++;
+    }
+    if (found_weak_root !== 0) {
+      roots_count++;
+    }
+    // Index column width covers the summary rows plus any root rows.
+    max_digits = digitsIn(length + roots_count);
+    max_size_digits = digitsIn(total_size);
+
+    index = 1;
+    if (found_root !== 0) {
+      result += '  [' + padding(index, max_digits) + index + '] ' +
+                ' count '+ 1 + padding(0, max_count_digits) +
+                '      '+ padding(0, max_size_digits+1) +
+                ' : <root>\n';
+      index++;
+    }
+    if (found_weak_root !== 0) {
+      result += '  [' + padding(index, max_digits) + index + '] ' +
+                ' count '+ 1 + padding(0, max_count_digits) +
+                '      '+ padding(0, max_size_digits+1) +
+                ' : <weak root>\n';
+      index++;
+    }
+
+    // One row per object type, right-aligned via padding().
+    for (i = 0; i < length; i++) {
+      entry = summary[i];
+      var count = entry.count;
+      var size = entry.size;
+      result += '  [' + padding(index, max_digits) + index + '] ' +
+                ' count '+ count + padding(count, max_count_digits) +
+                ' size '+ size + padding(size, max_size_digits) +
+                ' : <' + entry.desc + '>\n';
+      index++;
+    }
+    result += '\n  total count: '+(total_count+roots_count)+'\n';
+    if (body.size) {
+      result += '  total size:  '+body.size+'\n';
+    }
+
+  } else {
+    // Print the full dump result:
+    var first_index = body.first_index + 1;
+    var elements = body.elements;
+    length = elements.length;
+    var has_more = ((first_index + length) <= total_count);
+    result = title;
+    if (has_more || (first_index != 1)) {
+      result += ' ['+ length +' of '+ total_count +
+                ': starting from '+ first_index +']';
+    }
+    result += ':\n';
+    // NOTE(review): this "none" line is emitted again after the loop
+    // below, so an empty listing prints it twice — confirm intent.
+    if (length === 0) result += '  none\n';
+    max_digits = digitsIn(length);
+
+    // Pre-scan for the widest id and size so columns line up.
+    var max_id = 0;
+    var max_size = 0;
+    for (i = 0; i < length; i++) {
+      entry = elements[i];
+      if (entry.id > max_id) max_id = entry.id;
+      if (entry.size > max_size) max_size = entry.size;
+    }
+    var max_id_digits = digitsIn(max_id);
+    max_size_digits = digitsIn(max_size);
+
+    for (i = 0; i < length; i++) {
+      entry = elements[i];
+      index = first_index + i;
+      result += '  ['+ padding(index, max_digits) + index +']';
+      if (entry.id !== 0) {
+        result += ' @' + entry.id + padding(entry.id, max_id_digits) +
+                  ': size ' + entry.size + ', ' +
+                  padding(entry.size, max_size_digits) + entry.desc + '\n';
+      } else {
+        // Must be a root or weak root:
+        result += ' ' + entry.desc + '\n';
+      }
+    }
+    if (has_more) {
+      result += '  -- press <enter> for more --\n';
+    } else {
+      repeat_cmd_line = '';
+    }
+    if (length === 0) result += '  none\n';
+  }
+
+  return result;
+}
+
+
+// Formats a 'lol-diff' response by delegating to the shared listing
+// formatter with a generic "objects" title.
+function decodeLolDiffResponse(body) {
+  var title = 'objects';
+  return decodeLolListResponse(body, title);
+}
+
+
+// Formats a 'lol-retainers' response via the shared listing formatter,
+// titling it with the object id whose retainers were requested.
+function decodeLolRetainersResponse(body) {
+  var title = 'retainers for @' + body.id;
+  return decodeLolListResponse(body, title);
+}
+
+
+// Formats a 'lol-path' response: the backend already returns the
+// retaining path as printable text in body.path.
+function decodeLolPathResponse(body) {
+  return body.path;
+}
+
+
+// Formats a 'lol-reset' response with a fixed confirmation message;
+// the body carries no information for this command.
+function decodeLolResetResponse(body) {
+  return 'Reset all live object lists.';
+}
+
+
+// Formats a 'lol-getid' response.  An id of 0 is the backend's sentinel
+// for "no object at that address" (invalid, moved, or collected).
+function decodeLolGetIdResponse(body) {
+  if (body.id == 0) {
+    return 'Address is invalid, or object has been moved or collected';
+  }
+  return 'obj id is @' + body.id;
+}
+
+
+// Formats a 'lol-print' response: the backend returns the object dump
+// as preformatted text in body.dump.
+function decodeLolPrintResponse(body) {
+  return body.dump;
+}
+
+
// Rounds number 'num' to 'length' decimal places.
function roundNumber(num, length) {
var factor = Math.pow(10, length);
@@ -1510,6 +2099,7 @@
case 'evaluate':
case 'lookup':
+ case 'getobj':
if (last_cmd == 'p' || last_cmd == 'print') {
result = body.text;
} else {
@@ -1671,6 +2261,34 @@
}
break;
+ case 'lol-capture':
+ details.text = decodeLolCaptureResponse(body);
+ break;
+ case 'lol-delete':
+ details.text = decodeLolDeleteResponse(body);
+ break;
+ case 'lol-diff':
+ details.text = decodeLolDiffResponse(body);
+ break;
+ case 'lol-getid':
+ details.text = decodeLolGetIdResponse(body);
+ break;
+ case 'lol-info':
+ details.text = decodeLolInfoResponse(body);
+ break;
+ case 'lol-print':
+ details.text = decodeLolPrintResponse(body);
+ break;
+ case 'lol-reset':
+ details.text = decodeLolResetResponse(body);
+ break;
+ case 'lol-retainers':
+ details.text = decodeLolRetainersResponse(body);
+ break;
+ case 'lol-path':
+ details.text = decodeLolPathResponse(body);
+ break;
+
default:
details.text =
'Response for unknown command \'' + response.command() + '\'' +
diff --git a/src/debug-debugger.js b/src/debug-debugger.js
index 1adf73a..bc0f966 100644
--- a/src/debug-debugger.js
+++ b/src/debug-debugger.js
@@ -109,6 +109,7 @@
}
},
};
+var lol_is_enabled = %HasLOLEnabled();
// Create a new break point object and add it to the list of break points.
@@ -1391,6 +1392,8 @@
this.scopeRequest_(request, response);
} else if (request.command == 'evaluate') {
this.evaluateRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'getobj') {
+ this.getobjRequest_(request, response);
} else if (request.command == 'lookup') {
this.lookupRequest_(request, response);
} else if (request.command == 'references') {
@@ -1418,6 +1421,28 @@
} else if (request.command == 'gc') {
this.gcRequest_(request, response);
+ // LiveObjectList tools:
+ } else if (lol_is_enabled && request.command == 'lol-capture') {
+ this.lolCaptureRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-delete') {
+ this.lolDeleteRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-diff') {
+ this.lolDiffRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-getid') {
+ this.lolGetIdRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-info') {
+ this.lolInfoRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-reset') {
+ this.lolResetRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-retainers') {
+ this.lolRetainersRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-path') {
+ this.lolPathRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-print') {
+ this.lolPrintRequest_(request, response);
+ } else if (lol_is_enabled && request.command == 'lol-stats') {
+ this.lolStatsRequest_(request, response);
+
} else {
throw new Error('Unknown command "' + request.command + '" in request');
}
@@ -2011,6 +2036,24 @@
};
+// Handles the 'getobj' debug request: looks up the live object with the
+// given LOL object id via the %GetLOLObj runtime call and returns a
+// mirror of it as the response body.  Fails the response when arguments
+// or the required obj_id are missing.
+DebugCommandProcessor.prototype.getobjRequest_ = function(request, response) {
+  if (!request.arguments) {
+    return response.failed('Missing arguments');
+  }
+
+  // Pull out arguments.
+  var obj_id = request.arguments.obj_id;
+
+  // Check for legal arguments.
+  if (IS_UNDEFINED(obj_id)) {
+    return response.failed('Argument "obj_id" missing');
+  }
+
+  // Dump the object.
+  response.body = MakeMirror(%GetLOLObj(obj_id));
+};
+
+
DebugCommandProcessor.prototype.lookupRequest_ = function(request, response) {
if (!request.arguments) {
return response.failed('Missing arguments');
@@ -2341,6 +2384,84 @@
};
+// Handles 'lol-capture': captures a new live object list via the
+// %CaptureLOL runtime call and returns its descriptor as the body.
+DebugCommandProcessor.prototype.lolCaptureRequest_ =
+    function(request, response) {
+  response.body = %CaptureLOL();
+};
+
+
+// Handles 'lol-delete': deletes the live object list with the given id.
+// On success echoes the id back in the body; otherwise fails the
+// response.
+// NOTE(review): unlike getobjRequest_, request.arguments is dereferenced
+// without a null check — presumably the dispatcher guarantees it here;
+// confirm.
+DebugCommandProcessor.prototype.lolDeleteRequest_ =
+    function(request, response) {
+  var id = request.arguments.id;
+  var result = %DeleteLOL(id);
+  if (result) {
+    response.body = { id: id };
+  } else {
+    response.failed('Failed to delete: live object list ' + id + ' not found.');
+  }
+};
+
+
+// Handles 'lol-diff': diffs live object lists id1 and id2.  When
+// verbose === true, returns a paged element dump (%DumpLOL with
+// start/count and the optional type/space filter); otherwise returns an
+// aggregate per-type summary (%SummarizeLOL).
+DebugCommandProcessor.prototype.lolDiffRequest_ = function(request, response) {
+  var id1 = request.arguments.id1;
+  var id2 = request.arguments.id2;
+  var verbose = request.arguments.verbose;
+  var filter = request.arguments.filter;
+  if (verbose === true) {
+    var start = request.arguments.start;
+    var count = request.arguments.count;
+    response.body = %DumpLOL(id1, id2, start, count, filter);
+  } else {
+    response.body = %SummarizeLOL(id1, id2, filter);
+  }
+};
+
+
+// Handles 'lol-getid': resolves a heap address to a LOL object id via
+// %GetLOLObjId.  The id is 0 when no live object is at that address
+// (see decodeLolGetIdResponse in d8.js).
+DebugCommandProcessor.prototype.lolGetIdRequest_ = function(request, response) {
+  var address = request.arguments.address;
+  response.body = {};
+  response.body.id = %GetLOLObjId(address);
+};
+
+
+// Handles 'lol-info': returns summary info for the captured live object
+// lists, paged by the optional start/count arguments, via %InfoLOL.
+DebugCommandProcessor.prototype.lolInfoRequest_ = function(request, response) {
+  var start = request.arguments.start;
+  var count = request.arguments.count;
+  response.body = %InfoLOL(start, count);
+};
+
+
+// Handles 'lol-reset': drops all captured live object lists via
+// %ResetLOL.  Intentionally leaves response.body unset; the client side
+// prints a fixed confirmation (decodeLolResetResponse).
+DebugCommandProcessor.prototype.lolResetRequest_ = function(request, response) {
+  %ResetLOL();
+};
+
+
+// Handles 'lol-retainers': fetches the retainers of object |id| via
+// %GetLOLObjRetainers, forwarding the verbose/paging/filter arguments.
+// Mirror.prototype is passed so the runtime can construct mirror
+// objects for the results.
+DebugCommandProcessor.prototype.lolRetainersRequest_ =
+    function(request, response) {
+  var id = request.arguments.id;
+  var verbose = request.arguments.verbose;
+  var start = request.arguments.start;
+  var count = request.arguments.count;
+  var filter = request.arguments.filter;
+
+  response.body = %GetLOLObjRetainers(id, Mirror.prototype, verbose,
+                                      start, count, filter);
+};
+
+
+// Handles 'lol-path': computes the retaining path from object id1 to
+// id2 via %GetLOLPath and returns it as body.path.  Per the d8 help
+// text, when only one id is given the path is traced from the roots.
+DebugCommandProcessor.prototype.lolPathRequest_ = function(request, response) {
+  var id1 = request.arguments.id1;
+  var id2 = request.arguments.id2;
+  response.body = {};
+  response.body.path = %GetLOLPath(id1, id2, Mirror.prototype);
+};
+
+
+// Handles 'lol-print': dumps the object with the given LOL id via
+// %PrintLOLObj and returns the text as body.dump.
+DebugCommandProcessor.prototype.lolPrintRequest_ = function(request, response) {
+  var id = request.arguments.id;
+  response.body = {};
+  response.body.dump = %PrintLOLObj(id);
+};
// Check whether the previously processed command caused the VM to become
diff --git a/src/debug.cc b/src/debug.cc
index d8201a1..c473941 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -836,7 +836,8 @@
Handle<String> key = Factory::LookupAsciiSymbol("builtins");
Handle<GlobalObject> global = Handle<GlobalObject>(context->global());
RETURN_IF_EMPTY_HANDLE_VALUE(
- SetProperty(global, key, Handle<Object>(global->builtins()), NONE),
+ SetProperty(global, key, Handle<Object>(global->builtins()),
+ NONE, kNonStrictMode),
false);
// Compile the JavaScript for the debugger in the debugger context.
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index cf13def..ea245a4 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -110,7 +110,6 @@
DEFINE_bool(use_range, true, "use hydrogen range analysis")
DEFINE_bool(eliminate_dead_phis, true, "eliminate dead phis")
DEFINE_bool(use_gvn, true, "use hydrogen global value numbering")
-DEFINE_bool(use_peeling, false, "use loop peeling")
DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing")
DEFINE_bool(use_inlining, true, "use function inlining")
DEFINE_bool(limit_inlining, true, "limit code size growth from inlining")
@@ -267,6 +266,12 @@
// ic.cc
DEFINE_bool(use_ic, true, "use inline caching")
+#ifdef LIVE_OBJECT_LIST
+// liveobjectlist.cc
+DEFINE_string(lol_workdir, NULL, "path for lol temp files")
+DEFINE_bool(verify_lol, false, "perform debugging verification for lol")
+#endif
+
// macro-assembler-ia32.cc
DEFINE_bool(native_code_counters, false,
"generate extra code for manipulating stats counters")
@@ -355,7 +360,7 @@
"debugger agent in another process")
DEFINE_bool(debugger_agent, false, "Enable debugger agent")
DEFINE_int(debugger_port, 5858, "Port to use for remote debugging")
-DEFINE_string(map_counters, NULL, "Map counters to a file")
+DEFINE_string(map_counters, "", "Map counters to a file")
DEFINE_args(js_arguments, JSArguments(),
"Pass all remaining arguments to the script. Alias for \"--\".")
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 252fb92..b3dc95b 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -739,25 +739,13 @@
case Token::SHL:
case Token::SHR:
case Token::SAR: {
- // Figure out if either of the operands is a constant.
- ConstantOperand constant = ShouldInlineSmiCase(op)
- ? GetConstantOperand(op, left, right)
- : kNoConstants;
-
- // Load only the operands that we need to materialize.
- if (constant == kNoConstants) {
- VisitForStackValue(left);
- VisitForAccumulatorValue(right);
- } else if (constant == kRightConstant) {
- VisitForAccumulatorValue(left);
- } else {
- ASSERT(constant == kLeftConstant);
- VisitForAccumulatorValue(right);
- }
+ // Load both operands.
+ VisitForStackValue(left);
+ VisitForAccumulatorValue(right);
SetSourcePosition(expr->position());
if (ShouldInlineSmiCase(op)) {
- EmitInlineSmiBinaryOp(expr, op, mode, left, right, constant);
+ EmitInlineSmiBinaryOp(expr, op, mode, left, right);
} else {
EmitBinaryOp(op, mode);
}
diff --git a/src/full-codegen.h b/src/full-codegen.h
index 655e560..5fb11b4 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -274,12 +274,6 @@
ForwardBailoutStack* const parent_;
};
- enum ConstantOperand {
- kNoConstants,
- kLeftConstant,
- kRightConstant
- };
-
// Type of a member function that generates inline code for a native function.
typedef void (FullCodeGenerator::*InlineFunctionGenerator)
(ZoneList<Expression*>*);
@@ -298,11 +292,6 @@
// operation.
bool ShouldInlineSmiCase(Token::Value op);
- // Compute which (if any) of the operands is a compile-time constant.
- ConstantOperand GetConstantOperand(Token::Value op,
- Expression* left,
- Expression* right);
-
// Helper function to convert a pure value into a test context. The value
// is expected on the stack or the accumulator, depending on the platform.
// See the platform-specific implementation for details.
@@ -432,6 +421,14 @@
Label* done);
void EmitVariableLoad(Variable* expr);
+ enum ResolveEvalFlag {
+ SKIP_CONTEXT_LOOKUP,
+ PERFORM_CONTEXT_LOOKUP
+ };
+
+ // Expects the arguments and the function already pushed.
+ void EmitResolvePossiblyDirectEval(ResolveEvalFlag flag, int arg_count);
+
// Platform-specific support for allocating a new closure based on
// the given function info.
void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
@@ -457,34 +454,7 @@
Token::Value op,
OverwriteMode mode,
Expression* left,
- Expression* right,
- ConstantOperand constant);
-
- void EmitConstantSmiBinaryOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value);
-
- void EmitConstantSmiBitOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- Smi* value);
-
- void EmitConstantSmiShiftOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- Smi* value);
-
- void EmitConstantSmiAdd(Expression* expr,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value);
-
- void EmitConstantSmiSub(Expression* expr,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value);
+ Expression* right);
// Assign to the given expression as if via '='. The right-hand-side value
// is expected in the accumulator.
diff --git a/src/handles-inl.h b/src/handles-inl.h
index 1811023..c0f2fda 100644
--- a/src/handles-inl.h
+++ b/src/handles-inl.h
@@ -51,16 +51,6 @@
}
-template <typename T>
-HandleCell<T>::HandleCell(T* value)
- : location_(HandleScope::CreateHandle(value)) { }
-
-
-template <typename T>
-HandleCell<T>::HandleCell(Handle<T> value)
- : location_(HandleScope::CreateHandle(*value)) { }
-
-
#ifdef DEBUG
inline NoHandleAllocation::NoHandleAllocation() {
v8::ImplementationUtilities::HandleScopeData* current =
diff --git a/src/handles.cc b/src/handles.cc
index 8b2f95b..05c81bb 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -242,17 +242,21 @@
Handle<Object> SetProperty(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
- PropertyAttributes attributes) {
- CALL_HEAP_FUNCTION(object->SetProperty(*key, *value, attributes), Object);
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
+ CALL_HEAP_FUNCTION(object->SetProperty(*key, *value, attributes, strict),
+ Object);
}
Handle<Object> SetProperty(Handle<Object> object,
Handle<Object> key,
Handle<Object> value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
CALL_HEAP_FUNCTION(
- Runtime::SetObjectProperty(object, key, value, attributes), Object);
+ Runtime::SetObjectProperty(object, key, value, attributes, strict),
+ Object);
}
@@ -304,10 +308,12 @@
Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
CALL_HEAP_FUNCTION(object->SetPropertyWithInterceptor(*key,
*value,
- attributes),
+ attributes,
+ strict),
Object);
}
diff --git a/src/handles.h b/src/handles.h
index 8f1664b..9d3588b 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -93,55 +93,6 @@
};
-// A handle-scope based variable. The value stored in the variable can change
-// over time. The value stored in the variable at any time is a root
-// for garbage collection.
-// The variable is backed by the current HandleScope.
-template <typename T>
-class HandleCell {
- public:
- // Create a new HandleCell holding the given value.
- explicit HandleCell(Handle<T> value);
- explicit HandleCell(T* value);
-
- // Create an alias of an existing HandleCell.
- explicit HandleCell(const HandleCell<T>& value)
- : location_(value.location_) { }
-
- INLINE(T* operator->() const) { return operator*(); }
- INLINE(T* operator*() const) {
- return *location_;
- }
- INLINE(void operator=(T* value)) {
- *location_ = value;
- }
- INLINE(void operator=(Handle<T> value)) {
- *location_ = *value;
- }
- INLINE(void operator=(const HandleCell<T>& value)) {
- *location_ = *value.location_;
- }
-
- // Extract the value of the variable and cast it to a give type.
- // This is typically used for calling methods on a more specialized type.
- template <typename S>
- inline S* cast() {
- S::cast(*location_);
- return *reinterpret_cast<S**>(location_);
- }
-
- Handle<T> ToHandle() const {
- return Handle<T>(*location_);
- }
-
- private:
- // Prevent implicit constructor from being created.
- HandleCell();
-
- T** location_;
-};
-
-
// A stack-allocated class that governs a number of local handles.
// After a handle scope has been created, all local handles will be
// allocated within that handle scope until either the handle scope is
@@ -161,15 +112,7 @@
}
~HandleScope() {
- current_.next = prev_next_;
- current_.level--;
- if (current_.limit != prev_limit_) {
- current_.limit = prev_limit_;
- DeleteExtensions();
- }
-#ifdef DEBUG
- ZapRange(prev_next_, prev_limit_);
-#endif
+ CloseScope();
}
// Counts the number of allocated handles.
@@ -197,6 +140,26 @@
static Address current_limit_address();
static Address current_level_address();
+ // Closes the HandleScope (invalidating all handles
+ // created in the scope of the HandleScope) and returns
+ // a Handle backed by the parent scope holding the
+ // value of the argument handle.
+ template <typename T>
+ Handle<T> CloseAndEscape(Handle<T> handle_value) {
+ T* value = *handle_value;
+ // Throw away all handles in the current scope.
+ CloseScope();
+ // Allocate one handle in the parent scope.
+ ASSERT(current_.level > 0);
+ Handle<T> result(CreateHandle<T>(value));
+ // Reinitialize the current scope (so that it's ready
+ // to be used or closed again).
+ prev_next_ = current_.next;
+ prev_limit_ = current_.limit;
+ current_.level++;
+ return result;
+ }
+
private:
// Prevent heap allocation or illegal handle scopes.
HandleScope(const HandleScope&);
@@ -204,9 +167,23 @@
void* operator new(size_t size);
void operator delete(void* size_t);
+ inline void CloseScope() {
+ current_.next = prev_next_;
+ current_.level--;
+ if (current_.limit != prev_limit_) {
+ current_.limit = prev_limit_;
+ DeleteExtensions();
+ }
+#ifdef DEBUG
+ ZapRange(prev_next_, prev_limit_);
+#endif
+ }
+
static v8::ImplementationUtilities::HandleScopeData current_;
- Object** const prev_next_;
- Object** const prev_limit_;
+  // Holds values on entry. The prev_next_ value is never NULL
+  // on entry, but is set to NULL when this scope is closed.
+ Object** prev_next_;
+ Object** prev_limit_;
// Extend the handle scope making room for more handles.
static internal::Object** Extend();
@@ -246,12 +223,14 @@
Handle<Object> SetProperty(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
Handle<Object> SetProperty(Handle<Object> object,
Handle<Object> key,
Handle<Object> value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
Handle<Object> ForceSetProperty(Handle<JSObject> object,
Handle<Object> key,
@@ -282,7 +261,8 @@
Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
Handle<Object> SetElement(Handle<JSObject> object,
uint32_t index,
diff --git a/src/heap-profiler.cc b/src/heap-profiler.cc
index 732d2f4..07b631f 100644
--- a/src/heap-profiler.cc
+++ b/src/heap-profiler.cc
@@ -911,22 +911,27 @@
class CountingRetainersIterator {
public:
CountingRetainersIterator(const JSObjectsCluster& child_cluster,
+ HeapEntriesAllocator* allocator,
HeapEntriesMap* map)
- : child_(ClusterAsHeapObject(child_cluster)), map_(map) {
+ : child_(ClusterAsHeapObject(child_cluster)),
+ allocator_(allocator),
+ map_(map) {
if (map_->Map(child_) == NULL)
- map_->Pair(child_, HeapEntriesMap::kHeapEntryPlaceholder);
+ map_->Pair(child_, allocator_, HeapEntriesMap::kHeapEntryPlaceholder);
}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
if (map_->Map(ClusterAsHeapObject(cluster)) == NULL)
map_->Pair(ClusterAsHeapObject(cluster),
+ allocator_,
HeapEntriesMap::kHeapEntryPlaceholder);
map_->CountReference(ClusterAsHeapObject(cluster), child_);
}
private:
HeapObject* child_;
+ HeapEntriesAllocator* allocator_;
HeapEntriesMap* map_;
};
@@ -934,6 +939,7 @@
class AllocatingRetainersIterator {
public:
AllocatingRetainersIterator(const JSObjectsCluster& child_cluster,
+ HeapEntriesAllocator*,
HeapEntriesMap* map)
: child_(ClusterAsHeapObject(child_cluster)), map_(map) {
child_entry_ = map_->Map(child_);
@@ -966,8 +972,9 @@
class AggregatingRetainerTreeIterator {
public:
explicit AggregatingRetainerTreeIterator(ClustersCoarser* coarser,
+ HeapEntriesAllocator* allocator,
HeapEntriesMap* map)
- : coarser_(coarser), map_(map) {
+ : coarser_(coarser), allocator_(allocator), map_(map) {
}
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree) {
@@ -981,25 +988,28 @@
tree->ForEach(&retainers_aggregator);
tree_to_iterate = &dest_tree_;
}
- RetainersIterator iterator(cluster, map_);
+ RetainersIterator iterator(cluster, allocator_, map_);
tree_to_iterate->ForEach(&iterator);
}
private:
ClustersCoarser* coarser_;
+ HeapEntriesAllocator* allocator_;
HeapEntriesMap* map_;
};
-class AggregatedRetainerTreeAllocator {
+class AggregatedRetainerTreeAllocator : public HeapEntriesAllocator {
public:
AggregatedRetainerTreeAllocator(HeapSnapshot* snapshot,
int* root_child_index)
: snapshot_(snapshot), root_child_index_(root_child_index) {
}
+ ~AggregatedRetainerTreeAllocator() { }
- HeapEntry* GetEntry(
- HeapObject* obj, int children_count, int retainers_count) {
+ HeapEntry* AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count) {
+ HeapObject* obj = reinterpret_cast<HeapObject*>(ptr);
JSObjectsCluster cluster = HeapObjectAsCluster(obj);
const char* name = cluster.GetSpecialCaseName();
if (name == NULL) {
@@ -1018,12 +1028,13 @@
template<class Iterator>
void AggregatedHeapSnapshotGenerator::IterateRetainers(
- HeapEntriesMap* entries_map) {
+ HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map) {
RetainerHeapProfile* p = agg_snapshot_->js_retainer_profile();
AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_1(
- p->coarser(), entries_map);
+ p->coarser(), allocator, entries_map);
p->retainers_tree()->ForEach(&agg_ret_iter_1);
- AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2(NULL, entries_map);
+ AggregatingRetainerTreeIterator<Iterator> agg_ret_iter_2(
+ NULL, allocator, entries_map);
p->aggregator()->output_tree().ForEach(&agg_ret_iter_2);
}
@@ -1042,7 +1053,9 @@
agg_snapshot_->js_cons_profile()->ForEach(&counting_cons_iter);
histogram_entities_count += counting_cons_iter.entities_count();
HeapEntriesMap entries_map;
- IterateRetainers<CountingRetainersIterator>(&entries_map);
+ int root_child_index = 0;
+ AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
+ IterateRetainers<CountingRetainersIterator>(&allocator, &entries_map);
histogram_entities_count += entries_map.entries_count();
histogram_children_count += entries_map.total_children_count();
histogram_retainers_count += entries_map.total_retainers_count();
@@ -1056,10 +1069,7 @@
snapshot->AllocateEntries(histogram_entities_count,
histogram_children_count,
histogram_retainers_count);
- snapshot->AddEntry(HeapSnapshot::kInternalRootObject,
- root_children_count,
- 0);
- int root_child_index = 0;
+ snapshot->AddRootEntry(root_children_count);
for (int i = FIRST_NONSTRING_TYPE; i <= kAllStringsType; ++i) {
if (agg_snapshot_->info()[i].bytes() > 0) {
AddEntryFromAggregatedSnapshot(snapshot,
@@ -1075,11 +1085,10 @@
AllocatingConstructorHeapProfileIterator alloc_cons_iter(
snapshot, &root_child_index);
agg_snapshot_->js_cons_profile()->ForEach(&alloc_cons_iter);
- AggregatedRetainerTreeAllocator allocator(snapshot, &root_child_index);
- entries_map.UpdateEntries(&allocator);
+ entries_map.AllocateEntries();
// Fill up references.
- IterateRetainers<AllocatingRetainersIterator>(&entries_map);
+ IterateRetainers<AllocatingRetainersIterator>(&allocator, &entries_map);
snapshot->SetDominatorsToSelf();
}
diff --git a/src/heap-profiler.h b/src/heap-profiler.h
index 90c664e..20ba457 100644
--- a/src/heap-profiler.h
+++ b/src/heap-profiler.h
@@ -340,6 +340,7 @@
class HeapEntriesMap;
+class HeapEntriesAllocator;
class HeapSnapshot;
class AggregatedHeapSnapshotGenerator {
@@ -354,7 +355,8 @@
void CalculateStringsStats();
void CollectStats(HeapObject* obj);
template<class Iterator>
- void IterateRetainers(HeapEntriesMap* entries_map);
+ void IterateRetainers(
+ HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map);
AggregatedHeapSnapshot* agg_snapshot_;
};
diff --git a/src/heap.cc b/src/heap.cc
index 1fadec3..34ab9aa 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -844,8 +844,6 @@
ContextSlotCache::Clear();
DescriptorLookupCache::Clear();
- RuntimeProfiler::MarkCompactPrologue(is_compacting);
-
CompilationCache::MarkCompactPrologue();
CompletelyClearInstanceofCache();
@@ -1056,20 +1054,13 @@
// Scavenge object reachable from the global contexts list directly.
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
- // Scavenge objects reachable from the runtime-profiler sampler
- // window directly.
- Object** sampler_window_address = RuntimeProfiler::SamplerWindowAddress();
- int sampler_window_size = RuntimeProfiler::SamplerWindowSize();
- scavenge_visitor.VisitPointers(
- sampler_window_address,
- sampler_window_address + sampler_window_size);
-
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
LiveObjectList::UpdateReferencesForScavengeGC();
+ RuntimeProfiler::UpdateSamplesAfterScavenge();
ASSERT(new_space_front == new_space_.top());
@@ -5336,7 +5327,11 @@
for (int i = 0; i < object_stack_.length(); i++) {
if (i > 0) PrintF("\n |\n |\n V\n\n");
Object* obj = object_stack_[i];
+#ifdef OBJECT_PRINT
obj->Print();
+#else
+ obj->ShortPrint();
+#endif
}
PrintF("=====================================\n");
}
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 2de70ff..e40685c 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -535,24 +535,15 @@
HBasicBlock* HGraphBuilder::CreateWhile(IterationStatement* statement,
- HBasicBlock* condition_entry,
- HBasicBlock* exit_block,
- HBasicBlock* body_exit,
- HBasicBlock* break_block,
HBasicBlock* loop_entry,
- HBasicBlock* loop_exit) {
+ HBasicBlock* cond_false,
+ HBasicBlock* body_exit,
+ HBasicBlock* break_block) {
if (break_block != NULL) break_block->SetJoinId(statement->ExitId());
HBasicBlock* new_exit =
- CreateJoin(exit_block, break_block, statement->ExitId());
-
- if (loop_entry != NULL) {
- if (body_exit != NULL) body_exit->Goto(loop_entry, true);
- loop_entry->SetJoinId(statement->EntryId());
- new_exit = CreateJoin(new_exit, loop_exit, statement->ExitId());
- } else {
- if (body_exit != NULL) body_exit->Goto(condition_entry, true);
- }
- condition_entry->PostProcessLoopHeader(statement);
+ CreateJoin(cond_false, break_block, statement->ExitId());
+ if (body_exit != NULL) body_exit->Goto(loop_entry, true);
+ loop_entry->PostProcessLoopHeader(statement);
return new_exit;
}
@@ -2317,14 +2308,12 @@
}
-HSubgraph* HGraphBuilder::CreateLoopHeaderSubgraph(HEnvironment* env) {
- HSubgraph* subgraph = new HSubgraph(graph());
- HBasicBlock* block = graph()->CreateBasicBlock();
- HEnvironment* new_env = env->CopyAsLoopHeader(block);
- block->SetInitialEnvironment(new_env);
- subgraph->Initialize(block);
- subgraph->entry_block()->AttachLoopInformation();
- return subgraph;
+HBasicBlock* HGraphBuilder::CreateLoopHeader() {
+ HBasicBlock* header = graph()->CreateBasicBlock();
+ HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
+ header->SetInitialEnvironment(entry_env);
+ header->AttachLoopInformation();
+ return header;
}
@@ -2681,120 +2670,80 @@
void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
ASSERT(current_block() != NULL);
PreProcessOsrEntry(stmt);
+ HBasicBlock* loop_entry = CreateLoopHeader();
+ current_block()->Goto(loop_entry, false);
+ set_current_block(loop_entry);
- HSubgraph* body_graph = CreateLoopHeaderSubgraph(environment());
- current_block()->Goto(body_graph->entry_block(), false);
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
- ADD_TO_SUBGRAPH(body_graph, stmt->body());
+ Visit(stmt->body());
+ CHECK_BAILOUT;
}
- HBasicBlock* body_exit = JoinContinue(stmt,
- body_graph->exit_block(),
- break_info.continue_block());
- body_graph->set_exit_block(body_exit);
-
- if (body_graph->exit_block() == NULL || stmt->cond()->ToBooleanIsTrue()) {
- set_current_block(CreateEndless(stmt,
- body_graph->entry_block(),
- body_graph->exit_block(),
- break_info.break_block()));
+ HBasicBlock* body_exit =
+ JoinContinue(stmt, current_block(), break_info.continue_block());
+ HBasicBlock* loop_exit = NULL;
+ if (body_exit == NULL || stmt->cond()->ToBooleanIsTrue()) {
+ loop_exit = CreateEndless(stmt,
+ loop_entry,
+ body_exit,
+ break_info.break_block());
} else {
- HSubgraph* go_back = CreateEmptySubgraph();
- HSubgraph* exit = CreateEmptySubgraph();
- {
- SubgraphScope scope(this, body_graph);
- VISIT_FOR_CONTROL(stmt->cond(),
- go_back->entry_block(),
- exit->entry_block());
- go_back->entry_block()->SetJoinId(stmt->BackEdgeId());
- exit->entry_block()->SetJoinId(stmt->ExitId());
- }
- set_current_block(CreateDoWhile(stmt,
- body_graph->entry_block(),
- go_back->exit_block(),
- exit->exit_block(),
- break_info.break_block()));
+ set_current_block(body_exit);
+ HBasicBlock* cond_true = graph()->CreateBasicBlock();
+ HBasicBlock* cond_false = graph()->CreateBasicBlock();
+ VISIT_FOR_CONTROL(stmt->cond(), cond_true, cond_false);
+ cond_true->SetJoinId(stmt->BackEdgeId());
+ cond_false->SetJoinId(stmt->ExitId());
+ loop_exit = CreateDoWhile(stmt,
+ loop_entry,
+ cond_true,
+ cond_false,
+ break_info.break_block());
}
+ set_current_block(loop_exit);
}
void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
ASSERT(current_block() != NULL);
PreProcessOsrEntry(stmt);
+ HBasicBlock* loop_entry = CreateLoopHeader();
+ current_block()->Goto(loop_entry, false);
+ set_current_block(loop_entry);
- HSubgraph* cond_graph = NULL;
- HSubgraph* body_graph = NULL;
- HSubgraph* exit_graph = NULL;
-
- // If the condition is constant true, do not generate a condition subgraph.
- if (stmt->cond()->ToBooleanIsTrue()) {
- body_graph = CreateLoopHeaderSubgraph(environment());
- current_block()->Goto(body_graph->entry_block(), false);
- } else {
- cond_graph = CreateLoopHeaderSubgraph(environment());
- current_block()->Goto(cond_graph->entry_block(), false);
- body_graph = CreateEmptySubgraph();
- exit_graph = CreateEmptySubgraph();
- {
- SubgraphScope scope(this, cond_graph);
- VISIT_FOR_CONTROL(stmt->cond(),
- body_graph->entry_block(),
- exit_graph->entry_block());
- body_graph->entry_block()->SetJoinId(stmt->BodyId());
- exit_graph->entry_block()->SetJoinId(stmt->ExitId());
- }
+ // If the condition is constant true, do not generate a branch.
+ HBasicBlock* cond_false = NULL;
+ if (!stmt->cond()->ToBooleanIsTrue()) {
+ HBasicBlock* cond_true = graph()->CreateBasicBlock();
+ cond_false = graph()->CreateBasicBlock();
+ VISIT_FOR_CONTROL(stmt->cond(), cond_true, cond_false);
+ cond_true->SetJoinId(stmt->BodyId());
+ cond_false->SetJoinId(stmt->ExitId());
+ set_current_block(cond_true);
}
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
- ADD_TO_SUBGRAPH(body_graph, stmt->body());
+ Visit(stmt->body());
+ CHECK_BAILOUT;
}
- HBasicBlock* body_exit = JoinContinue(stmt,
- body_graph->exit_block(),
- break_info.continue_block());
- body_graph->set_exit_block(body_exit);
-
- if (cond_graph != NULL) {
- set_current_block(CreatePeeledWhile(stmt,
- cond_graph->entry_block(),
- exit_graph->exit_block(),
- body_graph->exit_block(),
- break_info.break_block()));
- } else {
- // TODO(fschneider): Implement peeling for endless loops as well.
- set_current_block(CreateEndless(stmt,
- body_graph->entry_block(),
- body_graph->exit_block(),
- break_info.break_block()));
- }
-}
-
-
-HBasicBlock* HGraphBuilder::CreatePeeledWhile(IterationStatement* stmt,
- HBasicBlock* condition_entry,
- HBasicBlock* exit_block,
- HBasicBlock* body_exit,
- HBasicBlock* break_block) {
- HBasicBlock* loop_entry = NULL;
+ HBasicBlock* body_exit =
+ JoinContinue(stmt, current_block(), break_info.continue_block());
HBasicBlock* loop_exit = NULL;
- if (FLAG_use_peeling && body_exit != NULL && stmt != peeled_statement_) {
- // Save the last peeled iteration statement to prevent infinite recursion.
- IterationStatement* outer_peeled_statement = peeled_statement_;
- peeled_statement_ = stmt;
- HSubgraph* loop = CreateGotoSubgraph(body_exit->last_environment());
- AddToSubgraph(loop, stmt);
- peeled_statement_ = outer_peeled_statement;
- if (HasStackOverflow()) return NULL;
- loop_entry = loop->entry_block();
- loop_exit = loop->exit_block();
+ if (stmt->cond()->ToBooleanIsTrue()) {
+ // TODO(fschneider): Implement peeling for endless loops as well.
+ loop_exit = CreateEndless(stmt,
+ loop_entry,
+ body_exit,
+ break_info.break_block());
+ } else {
+ loop_exit = CreateWhile(stmt,
+ loop_entry,
+ cond_false,
+ body_exit,
+ break_info.break_block());
}
- return CreateWhile(stmt,
- condition_entry,
- exit_block,
- body_exit,
- break_block,
- loop_entry,
- loop_exit);
+ set_current_block(loop_exit);
}
@@ -2806,59 +2755,49 @@
}
ASSERT(current_block() != NULL);
PreProcessOsrEntry(stmt);
+ HBasicBlock* loop_entry = CreateLoopHeader();
+ current_block()->Goto(loop_entry, false);
+ set_current_block(loop_entry);
- HSubgraph* cond_graph = NULL;
- HSubgraph* body_graph = NULL;
- HSubgraph* exit_graph = NULL;
+ HBasicBlock* cond_false = NULL;
if (stmt->cond() != NULL) {
- cond_graph = CreateLoopHeaderSubgraph(environment());
- current_block()->Goto(cond_graph->entry_block(), false);
- body_graph = CreateEmptySubgraph();
- exit_graph = CreateEmptySubgraph();
- {
- SubgraphScope scope(this, cond_graph);
- VISIT_FOR_CONTROL(stmt->cond(),
- body_graph->entry_block(),
- exit_graph->entry_block());
- body_graph->entry_block()->SetJoinId(stmt->BodyId());
- exit_graph->entry_block()->SetJoinId(stmt->ExitId());
- }
- } else {
- body_graph = CreateLoopHeaderSubgraph(environment());
- current_block()->Goto(body_graph->entry_block(), false);
+ HBasicBlock* cond_true = graph()->CreateBasicBlock();
+ cond_false = graph()->CreateBasicBlock();
+ VISIT_FOR_CONTROL(stmt->cond(), cond_true, cond_false);
+ cond_true->SetJoinId(stmt->BodyId());
+ cond_false->SetJoinId(stmt->ExitId());
+ set_current_block(cond_true);
}
+
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
- ADD_TO_SUBGRAPH(body_graph, stmt->body());
+ Visit(stmt->body());
+ CHECK_BAILOUT;
+ }
+ HBasicBlock* body_exit =
+ JoinContinue(stmt, current_block(), break_info.continue_block());
+
+ if (stmt->next() != NULL && body_exit != NULL) {
+ set_current_block(body_exit);
+ Visit(stmt->next());
+ CHECK_BAILOUT;
+ body_exit = current_block();
}
- HSubgraph* next_graph = NULL;
- HBasicBlock* body_exit = JoinContinue(stmt,
- body_graph->exit_block(),
- break_info.continue_block());
- body_graph->set_exit_block(body_exit);
-
- if (stmt->next() != NULL && body_graph->exit_block() != NULL) {
- next_graph =
- CreateGotoSubgraph(body_graph->exit_block()->last_environment());
- body_graph->exit_block()->Goto(next_graph->entry_block());
- next_graph->entry_block()->SetJoinId(stmt->ContinueId());
- ADD_TO_SUBGRAPH(next_graph, stmt->next());
- body_graph->set_exit_block(next_graph->exit_block());
- }
-
- if (cond_graph != NULL) {
- set_current_block(CreatePeeledWhile(stmt,
- cond_graph->entry_block(),
- exit_graph->exit_block(),
- body_graph->exit_block(),
- break_info.break_block()));
+ HBasicBlock* loop_exit = NULL;
+ if (stmt->cond() == NULL) {
+ loop_exit = CreateEndless(stmt,
+ loop_entry,
+ body_exit,
+ break_info.break_block());
} else {
- set_current_block(CreateEndless(stmt,
- body_graph->entry_block(),
- body_graph->exit_block(),
- break_info.break_block()));
+ loop_exit = CreateWhile(stmt,
+ loop_entry,
+ cond_false,
+ body_exit,
+ break_info.break_block());
}
+ set_current_block(loop_exit);
}
@@ -3830,9 +3769,11 @@
HInstruction* elements = AddInstruction(new HArgumentsElements);
result = new HArgumentsLength(elements);
} else {
+ Push(graph()->GetArgumentsObject());
VisitForValue(expr->key());
if (HasStackOverflow()) return false;
HValue* key = Pop();
+ Drop(1); // Arguments object.
HInstruction* elements = AddInstruction(new HArgumentsElements);
HInstruction* length = AddInstruction(new HArgumentsLength(elements));
AddInstruction(new HBoundsCheck(key, length));
diff --git a/src/hydrogen.h b/src/hydrogen.h
index bd222f4..1ac4fc4 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -677,12 +677,10 @@
HBasicBlock* second,
int join_id);
HBasicBlock* CreateWhile(IterationStatement* statement,
- HBasicBlock* condition_entry,
- HBasicBlock* exit_block,
- HBasicBlock* body_exit,
- HBasicBlock* break_block,
HBasicBlock* loop_entry,
- HBasicBlock* loop_exit);
+ HBasicBlock* cond_false,
+ HBasicBlock* body_exit,
+ HBasicBlock* break_block);
HBasicBlock* CreateDoWhile(IterationStatement* statement,
HBasicBlock* body_entry,
HBasicBlock* go_back,
@@ -692,11 +690,6 @@
HBasicBlock* body_entry,
HBasicBlock* body_exit,
HBasicBlock* break_block);
- HBasicBlock* CreatePeeledWhile(IterationStatement* stmt,
- HBasicBlock* condition_entry,
- HBasicBlock* exit_block,
- HBasicBlock* body_exit,
- HBasicBlock* break_block);
HBasicBlock* JoinContinue(IterationStatement* statement,
HBasicBlock* exit_block,
HBasicBlock* continue_block);
@@ -746,7 +739,7 @@
HSubgraph* CreateEmptySubgraph();
HSubgraph* CreateGotoSubgraph(HEnvironment* env);
HSubgraph* CreateBranchSubgraph(HEnvironment* env);
- HSubgraph* CreateLoopHeaderSubgraph(HEnvironment* env);
+ HBasicBlock* CreateLoopHeader();
HSubgraph* CreateInlinedSubgraph(HEnvironment* outer,
Handle<JSFunction> target,
FunctionLiteral* function);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index cb05c38..7efa934 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -3399,7 +3399,7 @@
__ test(edx, Immediate(kSmiTagMask));
__ j(not_zero, &base_nonsmi);
- // Optimized version when both exponent and base is a smi.
+ // Optimized version when both exponent and base are smis.
Label powi;
__ SmiUntag(edx);
__ cvtsi2sd(xmm0, Operand(edx));
@@ -3438,7 +3438,6 @@
__ j(not_carry, &no_multiply);
__ mulsd(xmm1, xmm0);
__ bind(&no_multiply);
- __ test(eax, Operand(eax));
__ mulsd(xmm0, xmm0);
__ j(not_zero, &while_true);
@@ -3525,7 +3524,7 @@
__ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
__ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
__ mov(eax, ecx);
- __ ret(2);
+ __ ret(2 * kPointerSize);
__ bind(&call_runtime);
__ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 770ec0b..3a2753d 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -3526,7 +3526,8 @@
frame_->EmitPush(esi); // The context is the first argument.
frame_->EmitPush(Immediate(pairs));
frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
- Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
+ frame_->EmitPush(Immediate(Smi::FromInt(strict_mode_flag())));
+ Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
@@ -5259,7 +5260,8 @@
// by initialization.
value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
} else {
- value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
+ frame_->Push(Smi::FromInt(strict_mode_flag()));
+ value = frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
}
// Storing a variable must keep the (new) value on the expression
// stack. This is necessary for compiling chained assignment
@@ -5618,8 +5620,9 @@
Load(property->key());
Load(property->value());
if (property->emit_store()) {
+ frame_->Push(Smi::FromInt(NONE)); // PropertyAttributes
// Ignore the result.
- Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
+ Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 4);
} else {
frame_->Drop(3);
}
@@ -8310,6 +8313,7 @@
switch (op) {
case Token::SUB: {
__ neg(value.reg());
+ frame_->Push(&value);
if (node->no_negative_zero()) {
// -MIN_INT is MIN_INT with the overflow flag set.
unsafe_bailout_->Branch(overflow);
@@ -8322,17 +8326,18 @@
}
case Token::BIT_NOT: {
__ not_(value.reg());
+ frame_->Push(&value);
break;
}
case Token::ADD: {
// Unary plus has no effect on int32 values.
+ frame_->Push(&value);
break;
}
default:
UNREACHABLE();
break;
}
- frame_->Push(&value);
} else {
Load(node->expression());
bool can_overwrite = node->expression()->ResultOverwriteAllowed();
@@ -9468,11 +9473,13 @@
DeferredReferenceSetKeyedValue(Register value,
Register key,
Register receiver,
- Register scratch)
+ Register scratch,
+ StrictModeFlag strict_mode)
: value_(value),
key_(key),
receiver_(receiver),
- scratch_(scratch) {
+ scratch_(scratch),
+ strict_mode_(strict_mode) {
set_comment("[ DeferredReferenceSetKeyedValue");
}
@@ -9486,6 +9493,7 @@
Register receiver_;
Register scratch_;
Label patch_site_;
+ StrictModeFlag strict_mode_;
};
@@ -9544,7 +9552,9 @@
}
// Call the IC stub.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET);
// The delta from the start of the map-compare instruction to the
// test instruction. We use masm_-> directly here instead of the
@@ -9906,7 +9916,8 @@
new DeferredReferenceSetKeyedValue(result.reg(),
key.reg(),
receiver.reg(),
- tmp.reg());
+ tmp.reg(),
+ strict_mode_flag());
// Check that the receiver is not a smi.
__ test(receiver.reg(), Immediate(kSmiTagMask));
@@ -9961,7 +9972,7 @@
deferred->BindExit();
} else {
- result = frame()->CallKeyedStoreIC();
+ result = frame()->CallKeyedStoreIC(strict_mode_flag());
// Make sure that we do not have a test instruction after the
// call. A test instruction after the call is used to
// indicate that we have generated an inline version of the
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 4255347..67e0e8f 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -322,23 +322,6 @@
}
-FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
- Token::Value op, Expression* left, Expression* right) {
- ASSERT(ShouldInlineSmiCase(op));
- if (op == Token::DIV || op == Token::MOD || op == Token::MUL) {
- // We never generate inlined constant smi operations for these.
- return kNoConstants;
- } else if (right->IsSmiLiteral()) {
- return kRightConstant;
- } else if (left->IsSmiLiteral() && !Token::IsShiftOp(op)) {
- // Don't inline shifts with constant left hand side.
- return kLeftConstant;
- } else {
- return kNoConstants;
- }
-}
-
-
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
@@ -741,7 +724,9 @@
prop->key()->AsLiteral()->handle()->IsSmi());
__ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(is_strict()
+ ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
}
@@ -758,7 +743,8 @@
__ push(esi); // The context is the first argument.
__ push(Immediate(pairs));
__ push(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
- __ CallRuntime(Runtime::kDeclareGlobals, 3);
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
@@ -1380,7 +1366,9 @@
VisitForAccumulatorValue(value);
__ mov(ecx, Immediate(key->handle()));
__ mov(edx, Operand(esp, 0));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
@@ -1394,7 +1382,8 @@
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
- __ CallRuntime(Runtime::kSetProperty, 3);
+ __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
+ __ CallRuntime(Runtime::kSetProperty, 4);
} else {
__ Drop(3);
}
@@ -1572,14 +1561,8 @@
}
Token::Value op = expr->binary_op();
- ConstantOperand constant = ShouldInlineSmiCase(op)
- ? GetConstantOperand(op, expr->target(), expr->value())
- : kNoConstants;
- ASSERT(constant == kRightConstant || constant == kNoConstants);
- if (constant == kNoConstants) {
- __ push(eax); // Left operand goes on the stack.
- VisitForAccumulatorValue(expr->value());
- }
+ __ push(eax); // Left operand goes on the stack.
+ VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
@@ -1591,8 +1574,7 @@
op,
mode,
expr->target(),
- expr->value(),
- constant);
+ expr->value());
} else {
EmitBinaryOp(op, mode);
}
@@ -1640,220 +1622,11 @@
}
-void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value) {
- NearLabel call_stub, done;
- // Optimistically add smi value with unknown object. If result overflows or is
- // not a smi then we had either a smi overflow or added a smi with a tagged
- // pointer.
- __ add(Operand(eax), Immediate(value));
- __ j(overflow, &call_stub);
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfSmi(eax, &done);
-
- // Undo the optimistic add operation and call the shared stub.
- __ bind(&call_stub);
- __ sub(Operand(eax), Immediate(value));
- TypeRecordingBinaryOpStub stub(Token::ADD, mode);
- if (left_is_constant_smi) {
- __ mov(edx, Immediate(value));
- } else {
- __ mov(edx, eax);
- __ mov(eax, Immediate(value));
- }
- EmitCallIC(stub.GetCode(), &patch_site);
-
- __ bind(&done);
- context()->Plug(eax);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value) {
- NearLabel call_stub, done;
- // Optimistically subtract smi value with unknown object. If result overflows
- // or is not a smi then we had either a smi overflow or added a smi with a
- // tagged pointer.
- if (left_is_constant_smi) {
- __ mov(ecx, eax);
- __ mov(eax, Immediate(value));
- __ sub(Operand(eax), ecx);
- } else {
- __ sub(Operand(eax), Immediate(value));
- }
- __ j(overflow, &call_stub);
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfSmi(eax, &done);
-
- __ bind(&call_stub);
- if (left_is_constant_smi) {
- __ mov(edx, Immediate(value));
- __ mov(eax, ecx);
- } else {
- __ add(Operand(eax), Immediate(value)); // Undo the subtraction.
- __ mov(edx, eax);
- __ mov(eax, Immediate(value));
- }
- TypeRecordingBinaryOpStub stub(Token::SUB, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
-
- __ bind(&done);
- context()->Plug(eax);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- Smi* value) {
- NearLabel call_stub, smi_case, done;
- int shift_value = value->value() & 0x1f;
-
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfSmi(eax, &smi_case);
-
- // Call stub.
- __ bind(&call_stub);
- __ mov(edx, eax);
- __ mov(eax, Immediate(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
- __ jmp(&done);
-
- // Smi case.
- __ bind(&smi_case);
- switch (op) {
- case Token::SHL:
- if (shift_value != 0) {
- __ mov(edx, eax);
- if (shift_value > 1) {
- __ shl(edx, shift_value - 1);
- }
- // Convert int result to smi, checking that it is in int range.
- STATIC_ASSERT(kSmiTagSize == 1); // Adjust code if not the case.
- __ add(edx, Operand(edx));
- __ j(overflow, &call_stub);
- __ mov(eax, edx); // Put result back into eax.
- }
- break;
- case Token::SAR:
- if (shift_value != 0) {
- __ sar(eax, shift_value);
- __ and_(eax, ~kSmiTagMask);
- }
- break;
- case Token::SHR:
- // SHR must return a positive value. When shifting by 0 or 1 we need to
- // check that smi tagging the result will not create a negative value.
- if (shift_value < 2) {
- __ mov(edx, eax);
- __ SmiUntag(edx);
- __ shr(edx, shift_value);
- __ test(edx, Immediate(0xc0000000));
- __ j(not_zero, &call_stub);
- __ SmiTag(edx);
- __ mov(eax, edx); // Put result back into eax.
- } else {
- __ SmiUntag(eax);
- __ shr(eax, shift_value);
- __ SmiTag(eax);
- }
- break;
- default:
- UNREACHABLE();
- }
-
- __ bind(&done);
- context()->Plug(eax);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- Smi* value) {
- NearLabel smi_case, done;
-
- JumpPatchSite patch_site(masm_);
- patch_site.EmitJumpIfSmi(eax, &smi_case);
-
- // The order of the arguments does not matter for bit-ops with a
- // constant operand.
- __ mov(edx, Immediate(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
- __ jmp(&done);
-
- // Smi case.
- __ bind(&smi_case);
- switch (op) {
- case Token::BIT_OR:
- __ or_(Operand(eax), Immediate(value));
- break;
- case Token::BIT_XOR:
- __ xor_(Operand(eax), Immediate(value));
- break;
- case Token::BIT_AND:
- __ and_(Operand(eax), Immediate(value));
- break;
- default:
- UNREACHABLE();
- }
-
- __ bind(&done);
- context()->Plug(eax);
-}
-
-
-void FullCodeGenerator::EmitConstantSmiBinaryOp(Expression* expr,
- Token::Value op,
- OverwriteMode mode,
- bool left_is_constant_smi,
- Smi* value) {
- switch (op) {
- case Token::BIT_OR:
- case Token::BIT_XOR:
- case Token::BIT_AND:
- EmitConstantSmiBitOp(expr, op, mode, value);
- break;
- case Token::SHL:
- case Token::SAR:
- case Token::SHR:
- ASSERT(!left_is_constant_smi);
- EmitConstantSmiShiftOp(expr, op, mode, value);
- break;
- case Token::ADD:
- EmitConstantSmiAdd(expr, mode, left_is_constant_smi, value);
- break;
- case Token::SUB:
- EmitConstantSmiSub(expr, mode, left_is_constant_smi, value);
- break;
- default:
- UNREACHABLE();
- }
-}
-
-
void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Expression* left,
- Expression* right,
- ConstantOperand constant) {
- if (constant == kRightConstant) {
- Smi* value = Smi::cast(*right->AsLiteral()->handle());
- EmitConstantSmiBinaryOp(expr, op, mode, false, value);
- return;
- } else if (constant == kLeftConstant) {
- Smi* value = Smi::cast(*left->AsLiteral()->handle());
- EmitConstantSmiBinaryOp(expr, op, mode, true, value);
- return;
- }
-
+ Expression* right) {
// Do combined smi check of the operands. Left operand is on the
// stack. Right operand is in eax.
NearLabel done, smi_case, stub_call;
@@ -1985,7 +1758,9 @@
__ mov(edx, eax);
__ pop(eax); // Restore value.
__ mov(ecx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
@@ -2006,7 +1781,9 @@
__ pop(edx);
}
__ pop(eax); // Restore value.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
@@ -2101,7 +1878,8 @@
__ push(eax); // Value.
__ push(esi); // Context.
__ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
}
}
@@ -2132,7 +1910,9 @@
} else {
__ pop(edx);
}
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
@@ -2170,7 +1950,9 @@
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
@@ -2283,6 +2065,27 @@
}
+void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
+ int arg_count) {
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ push(Operand(esp, arg_count * kPointerSize));
+ } else {
+ __ push(Immediate(Factory::undefined_value()));
+ }
+
+ // Push the receiver of the enclosing function.
+ __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
+
+ // Push the strict mode flag.
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
+ : Runtime::kResolvePossiblyDirectEval, 4);
+}
+
+
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
// We want to verify that RecordJSReturnSite gets called on all paths
@@ -2311,21 +2114,30 @@
VisitForStackValue(args->at(i));
}
- // Push copy of the function - found below the arguments.
- __ push(Operand(esp, (arg_count + 1) * kPointerSize));
-
- // Push copy of the first argument or undefined if it doesn't exist.
- if (arg_count > 0) {
- __ push(Operand(esp, arg_count * kPointerSize));
- } else {
- __ push(Immediate(Factory::undefined_value()));
+ // If we know that eval can only be shadowed by eval-introduced
+ // variables we attempt to load the global eval function directly
+ // in generated code. If we succeed, there is no need to perform a
+ // context lookup in the runtime system.
+ Label done;
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Label slow;
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow);
+ // Push the function and resolve eval.
+ __ push(eax);
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
+ __ jmp(&done);
+ __ bind(&slow);
}
- // Push the receiver of the enclosing function and do runtime call.
- __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
- // Push the strict mode flag.
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
+ // Push copy of the function (found below the arguments) and
+ // resolve eval.
+ __ push(Operand(esp, (arg_count + 1) * kPointerSize));
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
+ if (done.is_linked()) {
+ __ bind(&done);
+ }
// The runtime call returns a pair of values in eax (function) and
// edx (receiver). Touch up the stack with the right values.
@@ -3418,7 +3230,7 @@
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
Label bailout, done, one_char_separator, long_separator,
- non_trivial_array, not_size_one_array, loop, loop_condition,
+ non_trivial_array, not_size_one_array, loop,
loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
ASSERT(args->length() == 2);
@@ -3460,7 +3272,7 @@
// If the array has length zero, return the empty string.
__ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
- __ sar(array_length, 1);
+ __ SmiUntag(array_length);
__ j(not_zero, &non_trivial_array);
__ mov(result_operand, Factory::empty_string());
__ jmp(&done);
@@ -3483,14 +3295,15 @@
// Loop condition: while (index < length).
// Live loop registers: index, array_length, string,
// scratch, string_length, elements.
- __ jmp(&loop_condition);
+ if (FLAG_debug_code) {
+ __ cmp(index, Operand(array_length));
+ __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
+ }
__ bind(&loop);
- __ cmp(index, Operand(array_length));
- __ j(greater_equal, &done);
-
- __ mov(string, FieldOperand(elements, index,
- times_pointer_size,
- FixedArray::kHeaderSize));
+ __ mov(string, FieldOperand(elements,
+ index,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
__ test(string, Immediate(kSmiTagMask));
__ j(zero, &bailout);
__ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
@@ -3503,7 +3316,6 @@
FieldOperand(string, SeqAsciiString::kLengthOffset));
__ j(overflow, &bailout);
__ add(Operand(index), Immediate(1));
- __ bind(&loop_condition);
__ cmp(index, Operand(array_length));
__ j(less, &loop);
@@ -3532,7 +3344,7 @@
__ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
__ and_(scratch, Immediate(
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
- __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
+ __ cmp(scratch, ASCII_STRING_TYPE);
__ j(not_equal, &bailout);
// Add (separator length times array_length) - separator length
@@ -4025,7 +3837,9 @@
case NAMED_PROPERTY: {
__ mov(ecx, prop->key()->AsLiteral()->handle());
__ pop(edx);
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
@@ -4040,7 +3854,9 @@
case KEYED_PROPERTY: {
__ pop(ecx);
__ pop(edx);
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 73cd60d..6b9e749 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -761,7 +761,8 @@
}
-void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : key
@@ -801,7 +802,7 @@
// Slow case: call runtime.
__ bind(&slow);
- GenerateRuntimeSetProperty(masm);
+ GenerateRuntimeSetProperty(masm, strict_mode);
// Check whether the elements is a pixel array.
__ bind(&check_pixel_array);
@@ -1488,7 +1489,7 @@
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : name
@@ -1499,7 +1500,7 @@
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
NOT_IN_LOOP,
MONOMORPHIC,
- extra_ic_state);
+ strict_mode);
StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
// Cache miss: Jump to runtime.
@@ -1617,7 +1618,8 @@
}
-void StoreIC::GenerateGlobalProxy(MacroAssembler* masm) {
+void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : name
@@ -1628,14 +1630,17 @@
__ push(edx);
__ push(ecx);
__ push(eax);
- __ push(ebx);
+ __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
+ __ push(Immediate(Smi::FromInt(strict_mode)));
+ __ push(ebx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
-void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : key
@@ -1647,10 +1652,12 @@
__ push(edx);
__ push(ecx);
__ push(eax);
- __ push(ebx);
+ __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
+ __ push(Immediate(Smi::FromInt(strict_mode))); // Strict mode.
+ __ push(ebx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index c7424a5..0b345d3 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -588,7 +588,8 @@
Handle<DeoptimizationInputData> data =
Factory::NewDeoptimizationInputData(length, TENURED);
- data->SetTranslationByteArray(*translations_.CreateByteArray());
+ Handle<ByteArray> translations = translations_.CreateByteArray();
+ data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
Handle<FixedArray> literals =
@@ -1912,12 +1913,7 @@
__ bind(&before_push_delta);
__ mov(temp, Immediate(delta));
__ StoreToSafepointRegisterSlot(temp, temp);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
- ASSERT_EQ(kAdditionalDelta,
- masm_->SizeOfCodeGeneratedSince(&before_push_delta));
- RecordSafepointWithRegisters(
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
// Put the result value into the eax slot and restore all registers.
__ StoreToSafepointRegisterSlot(eax, eax);
__ PopSafepointRegisters();
@@ -2786,7 +2782,9 @@
ASSERT(ToRegister(instr->value()).is(eax));
__ mov(ecx, instr->name());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
@@ -2854,7 +2852,9 @@
ASSERT(ToRegister(instr->key()).is(ecx));
ASSERT(ToRegister(instr->value()).is(eax));
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index 221a7aa..ea6d41a 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -870,10 +870,18 @@
ASSERT(instr->representation().IsDouble());
ASSERT(instr->left()->representation().IsDouble());
ASSERT(instr->right()->representation().IsDouble());
- LOperand* left = UseRegisterAtStart(instr->left());
- LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
- return DefineSameAsFirst(result);
+ if (op == Token::MOD) {
+ LOperand* left = UseFixedDouble(instr->left(), xmm2);
+ LOperand* right = UseFixedDouble(instr->right(), xmm1);
+ LArithmeticD* result = new LArithmeticD(op, left, right);
+ return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
+
+ } else {
+ LOperand* left = UseRegisterAtStart(instr->left());
+ LOperand* right = UseRegisterAtStart(instr->right());
+ LArithmeticD* result = new LArithmeticD(op, left, right);
+ return DefineSameAsFirst(result);
+ }
}
@@ -1165,8 +1173,7 @@
new LInstanceOfKnownGlobal(
UseFixed(instr->value(), InstanceofStub::left()),
FixedTemp(edi));
- MarkAsSaveDoubles(result);
- return AssignEnvironment(AssignPointerMap(DefineFixed(result, eax)));
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 51cc46a..633097a 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -2552,12 +2552,13 @@
__ push(edx); // receiver
__ push(ecx); // name
__ push(eax); // value
+ __ push(Immediate(Smi::FromInt(strict_mode_)));
__ push(ebx); // restore return address
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
- __ TailCallExternalReference(store_ic_property, 3, 1);
+ __ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -3712,10 +3713,13 @@
__ push(edx);
__ push(ecx);
__ push(eax);
- __ push(ebx);
+ __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
+ __ push(Immediate(Smi::FromInt(
+ Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
+ __ push(ebx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
return GetCode(flags);
}
diff --git a/src/ia32/virtual-frame-ia32.cc b/src/ia32/virtual-frame-ia32.cc
index 515a9fe..93d711e 100644
--- a/src/ia32/virtual-frame-ia32.cc
+++ b/src/ia32/virtual-frame-ia32.cc
@@ -1038,9 +1038,9 @@
StrictModeFlag strict_mode) {
// Value and (if not contextual) receiver are on top of the frame.
// The IC expects name in ecx, value in eax, and receiver in edx.
- Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
Result value = Pop();
RelocInfo::Mode mode;
@@ -1061,7 +1061,7 @@
}
-Result VirtualFrame::CallKeyedStoreIC() {
+Result VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
// Value, key, and receiver are on the top of the frame. The IC
// expects value in eax, key in ecx, and receiver in edx.
Result value = Pop();
@@ -1105,7 +1105,9 @@
receiver.Unuse();
}
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
@@ -1337,6 +1339,20 @@
}
+void VirtualFrame::Push(Handle<Object> value) {
+ if (ConstantPoolOverflowed()) {
+ Result temp = cgen()->allocator()->Allocate();
+ ASSERT(temp.is_valid());
+ __ Set(temp.reg(), Immediate(value));
+ Push(&temp);
+ } else {
+ FrameElement element =
+ FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED);
+ elements_.Add(element);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/ia32/virtual-frame-ia32.h b/src/ia32/virtual-frame-ia32.h
index 93362b4..5187430 100644
--- a/src/ia32/virtual-frame-ia32.h
+++ b/src/ia32/virtual-frame-ia32.h
@@ -370,7 +370,7 @@
// Call keyed store IC. Value, key, and receiver are found on top
// of the frame. All three are dropped.
- Result CallKeyedStoreIC();
+ Result CallKeyedStoreIC(StrictModeFlag strict_mode);
// Call call IC. Function name, arguments, and receiver are found on top
// of the frame and dropped by the call. The argument count does not
@@ -422,8 +422,8 @@
inline bool ConstantPoolOverflowed();
// Push an element on the virtual frame.
+ void Push(Handle<Object> value);
inline void Push(Register reg, TypeInfo info = TypeInfo::Unknown());
- inline void Push(Handle<Object> value);
inline void Push(Smi* value);
void PushUntaggedElement(Handle<Object> value);
diff --git a/src/ic-inl.h b/src/ic-inl.h
index 8fbc184..9d358ed 100644
--- a/src/ic-inl.h
+++ b/src/ic-inl.h
@@ -76,6 +76,15 @@
void IC::SetTargetAtAddress(Address address, Code* target) {
ASSERT(target->is_inline_cache_stub() || target->is_compare_ic_stub());
+#ifdef DEBUG
+ // STORE_IC and KEYED_STORE_IC use Code::extra_ic_state() to mark
+ // ICs as strict mode. The strict-ness of the IC must be preserved.
+ Code* old_target = GetTargetAtAddress(address);
+ if (old_target->kind() == Code::STORE_IC ||
+ old_target->kind() == Code::KEYED_STORE_IC) {
+ ASSERT(old_target->extra_ic_state() == target->extra_ic_state());
+ }
+#endif
Assembler::set_target_address_at(address, target->instruction_start());
}
diff --git a/src/ic.cc b/src/ic.cc
index 4f75ade..2d666d6 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -343,7 +343,7 @@
if (target->ic_state() == UNINITIALIZED) return;
ClearInlinedVersion(address);
SetTargetAtAddress(address,
- target->extra_ic_state() == kStoreICStrict
+ (target->extra_ic_state() == kStrictMode)
? initialize_stub_strict()
: initialize_stub());
}
@@ -366,7 +366,10 @@
void KeyedStoreIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
- SetTargetAtAddress(address, initialize_stub());
+ SetTargetAtAddress(address,
+ (target->extra_ic_state() == kStrictMode)
+ ? initialize_stub_strict()
+ : initialize_stub());
}
@@ -1227,7 +1230,8 @@
if (receiver->HasExternalArrayElements()) {
MaybeObject* probe =
StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver,
- false);
+ false,
+ kNonStrictMode);
stub = probe->IsFailure() ?
NULL : Code::cast(probe->ToObjectUnchecked());
} else if (receiver->HasIndexedInterceptor()) {
@@ -1383,7 +1387,7 @@
MaybeObject* StoreIC::Store(State state,
- Code::ExtraICState extra_ic_state,
+ StrictModeFlag strict_mode,
Handle<Object> object,
Handle<String> name,
Handle<Object> value) {
@@ -1413,11 +1417,11 @@
#ifdef DEBUG
if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
#endif
- Builtins::Name target = (extra_ic_state == kStoreICStrict)
+ Builtins::Name target = (strict_mode == kStrictMode)
? Builtins::StoreIC_ArrayLength_Strict
: Builtins::StoreIC_ArrayLength;
set_target(Builtins::builtin(target));
- return receiver->SetProperty(*name, *value, NONE);
+ return receiver->SetProperty(*name, *value, NONE, strict_mode);
}
// Lookup the property locally in the receiver.
@@ -1441,13 +1445,15 @@
// Index is an offset from the end of the object.
int offset = map->instance_size() + (index * kPointerSize);
if (PatchInlinedStore(address(), map, offset)) {
- set_target(megamorphic_stub());
+ set_target((strict_mode == kStrictMode)
+ ? megamorphic_stub_strict()
+ : megamorphic_stub());
#ifdef DEBUG
if (FLAG_trace_ic) {
PrintF("[StoreIC : inline patch %s]\n", *name->ToCString());
}
#endif
- return receiver->SetProperty(*name, *value, NONE);
+ return receiver->SetProperty(*name, *value, NONE, strict_mode);
#ifdef DEBUG
} else {
@@ -1474,11 +1480,16 @@
// If no inlined store ic was patched, generate a stub for this
// store.
- UpdateCaches(&lookup, state, extra_ic_state, receiver, name, value);
+ UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
} else {
- // Strict mode doesn't allow setting non-existent global property.
- if (extra_ic_state == kStoreICStrict && IsContextual(object)) {
- return ReferenceError("not_defined", name);
+ // Strict mode doesn't allow setting non-existent global property
+ // or an assignment to a read only property.
+ if (strict_mode == kStrictMode) {
+ if (lookup.IsFound() && lookup.IsReadOnly()) {
+ return TypeError("strict_read_only_property", object, name);
+ } else if (IsContextual(object)) {
+ return ReferenceError("not_defined", name);
+ }
}
}
}
@@ -1486,7 +1497,7 @@
if (receiver->IsJSGlobalProxy()) {
// Generate a generic stub that goes to the runtime when we see a global
// proxy as receiver.
- Code* stub = (extra_ic_state == kStoreICStrict)
+ Code* stub = (strict_mode == kStrictMode)
? global_proxy_stub_strict()
: global_proxy_stub();
if (target() != stub) {
@@ -1498,13 +1509,13 @@
}
// Set the property.
- return receiver->SetProperty(*name, *value, NONE);
+ return receiver->SetProperty(*name, *value, NONE, strict_mode);
}
void StoreIC::UpdateCaches(LookupResult* lookup,
State state,
- Code::ExtraICState extra_ic_state,
+ StrictModeFlag strict_mode,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value) {
@@ -1526,7 +1537,7 @@
switch (type) {
case FIELD: {
maybe_code = StubCache::ComputeStoreField(
- *name, *receiver, lookup->GetFieldIndex(), NULL, extra_ic_state);
+ *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode);
break;
}
case MAP_TRANSITION: {
@@ -1536,7 +1547,7 @@
Handle<Map> transition(lookup->GetTransitionMap());
int index = transition->PropertyIndexFor(*name);
maybe_code = StubCache::ComputeStoreField(
- *name, *receiver, index, *transition, extra_ic_state);
+ *name, *receiver, index, *transition, strict_mode);
break;
}
case NORMAL: {
@@ -1548,10 +1559,10 @@
JSGlobalPropertyCell* cell =
JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
maybe_code = StubCache::ComputeStoreGlobal(
- *name, *global, cell, extra_ic_state);
+ *name, *global, cell, strict_mode);
} else {
if (lookup->holder() != *receiver) return;
- maybe_code = StubCache::ComputeStoreNormal(extra_ic_state);
+ maybe_code = StubCache::ComputeStoreNormal(strict_mode);
}
break;
}
@@ -1560,13 +1571,13 @@
AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
if (v8::ToCData<Address>(callback->setter()) == 0) return;
maybe_code = StubCache::ComputeStoreCallback(
- *name, *receiver, callback, extra_ic_state);
+ *name, *receiver, callback, strict_mode);
break;
}
case INTERCEPTOR: {
ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
maybe_code = StubCache::ComputeStoreInterceptor(
- *name, *receiver, extra_ic_state);
+ *name, *receiver, strict_mode);
break;
}
default:
@@ -1583,7 +1594,7 @@
} else if (state == MONOMORPHIC) {
// Only move to megamorphic if the target changes.
if (target() != Code::cast(code)) {
- set_target(extra_ic_state == kStoreICStrict
+ set_target((strict_mode == kStrictMode)
? megamorphic_stub_strict()
: megamorphic_stub());
}
@@ -1599,6 +1610,7 @@
MaybeObject* KeyedStoreIC::Store(State state,
+ StrictModeFlag strict_mode,
Handle<Object> object,
Handle<Object> key,
Handle<Object> value) {
@@ -1630,11 +1642,11 @@
// Update inline cache and stub cache.
if (FLAG_use_ic) {
- UpdateCaches(&lookup, state, receiver, name, value);
+ UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
}
// Set the property.
- return receiver->SetProperty(*name, *value, NONE);
+ return receiver->SetProperty(*name, *value, NONE, strict_mode);
}
// Do not use ICs for objects that require access checks (including
@@ -1643,23 +1655,25 @@
ASSERT(!(use_ic && object->IsJSGlobalProxy()));
if (use_ic) {
- Code* stub = generic_stub();
+ Code* stub =
+ (strict_mode == kStrictMode) ? generic_stub_strict() : generic_stub();
if (state == UNINITIALIZED) {
if (object->IsJSObject()) {
Handle<JSObject> receiver = Handle<JSObject>::cast(object);
if (receiver->HasExternalArrayElements()) {
MaybeObject* probe =
- StubCache::ComputeKeyedLoadOrStoreExternalArray(*receiver, true);
+ StubCache::ComputeKeyedLoadOrStoreExternalArray(
+ *receiver, true, strict_mode);
stub = probe->IsFailure() ?
NULL : Code::cast(probe->ToObjectUnchecked());
} else if (receiver->HasPixelElements()) {
MaybeObject* probe =
- StubCache::ComputeKeyedStorePixelArray(*receiver);
+ StubCache::ComputeKeyedStorePixelArray(*receiver, strict_mode);
stub = probe->IsFailure() ?
NULL : Code::cast(probe->ToObjectUnchecked());
} else if (key->IsSmi() && receiver->map()->has_fast_elements()) {
MaybeObject* probe =
- StubCache::ComputeKeyedStoreSpecialized(*receiver);
+ StubCache::ComputeKeyedStoreSpecialized(*receiver, strict_mode);
stub = probe->IsFailure() ?
NULL : Code::cast(probe->ToObjectUnchecked());
}
@@ -1669,12 +1683,13 @@
}
// Set the property.
- return Runtime::SetObjectProperty(object, key, value, NONE);
+ return Runtime::SetObjectProperty(object, key, value, NONE, strict_mode);
}
void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
State state,
+ StrictModeFlag strict_mode,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value) {
@@ -1701,8 +1716,8 @@
switch (type) {
case FIELD: {
- maybe_code = StubCache::ComputeKeyedStoreField(*name, *receiver,
- lookup->GetFieldIndex());
+ maybe_code = StubCache::ComputeKeyedStoreField(
+ *name, *receiver, lookup->GetFieldIndex(), NULL, strict_mode);
break;
}
case MAP_TRANSITION: {
@@ -1711,8 +1726,8 @@
ASSERT(type == MAP_TRANSITION);
Handle<Map> transition(lookup->GetTransitionMap());
int index = transition->PropertyIndexFor(*name);
- maybe_code = StubCache::ComputeKeyedStoreField(*name, *receiver,
- index, *transition);
+ maybe_code = StubCache::ComputeKeyedStoreField(
+ *name, *receiver, index, *transition, strict_mode);
break;
}
// fall through.
@@ -1720,7 +1735,9 @@
default: {
// Always rewrite to the generic case so that we do not
// repeatedly try to rewrite.
- maybe_code = generic_stub();
+ maybe_code = (strict_mode == kStrictMode)
+ ? generic_stub_strict()
+ : generic_stub();
break;
}
}
@@ -1735,7 +1752,9 @@
if (state == UNINITIALIZED || state == PREMONOMORPHIC) {
set_target(Code::cast(code));
} else if (state == MONOMORPHIC) {
- set_target(megamorphic_stub());
+ set_target((strict_mode == kStrictMode)
+ ? megamorphic_stub_strict()
+ : megamorphic_stub());
}
#ifdef DEBUG
@@ -1836,8 +1855,11 @@
StoreIC ic;
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
- return ic.Store(state, extra_ic_state, args.at<Object>(0),
- args.at<String>(1), args.at<Object>(2));
+ return ic.Store(state,
+ static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
+ args.at<Object>(0),
+ args.at<String>(1),
+ args.at<Object>(2));
}
@@ -1901,7 +1923,11 @@
ASSERT(args.length() == 3);
KeyedStoreIC ic;
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
- return ic.Store(state, args.at<Object>(0), args.at<Object>(1),
+ Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
+ return ic.Store(state,
+ static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
+ args.at<Object>(0),
+ args.at<Object>(1),
args.at<Object>(2));
}
diff --git a/src/ic.h b/src/ic.h
index 96838c7..e12cbaf 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -398,16 +398,10 @@
class StoreIC: public IC {
public:
-
- enum StoreICStrictMode {
- kStoreICNonStrict = kNonStrictMode,
- kStoreICStrict = kStrictMode
- };
-
StoreIC() : IC(NO_EXTRA_FRAME) { ASSERT(target()->is_store_stub()); }
MUST_USE_RESULT MaybeObject* Store(State state,
- Code::ExtraICState extra_ic_state,
+ StrictModeFlag strict_mode,
Handle<Object> object,
Handle<String> name,
Handle<Object> value);
@@ -416,10 +410,11 @@
static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
static void GenerateMiss(MacroAssembler* masm);
static void GenerateMegamorphic(MacroAssembler* masm,
- Code::ExtraICState extra_ic_state);
+ StrictModeFlag strict_mode);
static void GenerateArrayLength(MacroAssembler* masm);
static void GenerateNormal(MacroAssembler* masm);
- static void GenerateGlobalProxy(MacroAssembler* masm);
+ static void GenerateGlobalProxy(MacroAssembler* masm,
+ StrictModeFlag strict_mode);
// Clear the use of an inlined version.
static void ClearInlinedVersion(Address address);
@@ -433,11 +428,18 @@
// lookup result.
void UpdateCaches(LookupResult* lookup,
State state,
- Code::ExtraICState extra_ic_state,
+ StrictModeFlag strict_mode,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value);
+ void set_target(Code* code) {
+ // Strict mode must be preserved across IC patching.
+ ASSERT((code->extra_ic_state() & kStrictMode) ==
+ (target()->extra_ic_state() & kStrictMode));
+ IC::set_target(code);
+ }
+
// Stub accessors.
static Code* megamorphic_stub() {
return Builtins::builtin(Builtins::StoreIC_Megamorphic);
@@ -473,6 +475,7 @@
KeyedStoreIC() : IC(NO_EXTRA_FRAME) { }
MUST_USE_RESULT MaybeObject* Store(State state,
+ StrictModeFlag strict_mode,
Handle<Object> object,
Handle<Object> name,
Handle<Object> value);
@@ -480,8 +483,9 @@
// Code generators for stub routines. Only called once at startup.
static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
static void GenerateMiss(MacroAssembler* masm);
- static void GenerateRuntimeSetProperty(MacroAssembler* masm);
- static void GenerateGeneric(MacroAssembler* masm);
+ static void GenerateRuntimeSetProperty(MacroAssembler* masm,
+ StrictModeFlag strict_mode);
+ static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
// Clear the inlined version so the IC is always hit.
static void ClearInlinedVersion(Address address);
@@ -493,20 +497,37 @@
// Update the inline cache.
void UpdateCaches(LookupResult* lookup,
State state,
+ StrictModeFlag strict_mode,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value);
+ void set_target(Code* code) {
+ // Strict mode must be preserved across IC patching.
+ ASSERT((code->extra_ic_state() & kStrictMode) ==
+ (target()->extra_ic_state() & kStrictMode));
+ IC::set_target(code);
+ }
+
// Stub accessors.
static Code* initialize_stub() {
return Builtins::builtin(Builtins::KeyedStoreIC_Initialize);
}
+ static Code* initialize_stub_strict() {
+ return Builtins::builtin(Builtins::KeyedStoreIC_Initialize_Strict);
+ }
static Code* megamorphic_stub() {
return Builtins::builtin(Builtins::KeyedStoreIC_Generic);
}
+ static Code* megamorphic_stub_strict() {
+ return Builtins::builtin(Builtins::KeyedStoreIC_Generic_Strict);
+ }
static Code* generic_stub() {
return Builtins::builtin(Builtins::KeyedStoreIC_Generic);
}
+ static Code* generic_stub_strict() {
+ return Builtins::builtin(Builtins::KeyedStoreIC_Generic_Strict);
+ }
static void Clear(Address address, Code* target);
diff --git a/src/liveobjectlist-inl.h b/src/liveobjectlist-inl.h
index 997da4e..f742de3 100644
--- a/src/liveobjectlist-inl.h
+++ b/src/liveobjectlist-inl.h
@@ -32,5 +32,95 @@
#include "liveobjectlist.h"
+namespace v8 {
+namespace internal {
+
+#ifdef LIVE_OBJECT_LIST
+
+void LiveObjectList::GCEpilogue() {
+ if (!NeedLOLProcessing()) return;
+ GCEpiloguePrivate();
+}
+
+
+void LiveObjectList::GCPrologue() {
+ if (!NeedLOLProcessing()) return;
+#ifdef VERIFY_LOL
+ if (FLAG_verify_lol) {
+ Verify();
+ }
+#endif
+}
+
+
+void LiveObjectList::IterateElements(ObjectVisitor* v) {
+ if (!NeedLOLProcessing()) return;
+ IterateElementsPrivate(v);
+}
+
+
+void LiveObjectList::ProcessNonLive(HeapObject *obj) {
+ // Only do work if we have at least one list to process.
+ if (last()) DoProcessNonLive(obj);
+}
+
+
+void LiveObjectList::UpdateReferencesForScavengeGC() {
+ if (LiveObjectList::NeedLOLProcessing()) {
+ UpdateLiveObjectListVisitor update_visitor;
+ LiveObjectList::IterateElements(&update_visitor);
+ }
+}
+
+
+LiveObjectList* LiveObjectList::FindLolForId(int id,
+ LiveObjectList* start_lol) {
+ if (id != 0) {
+ LiveObjectList* lol = start_lol;
+ while (lol != NULL) {
+ if (lol->id() == id) {
+ return lol;
+ }
+ lol = lol->prev_;
+ }
+ }
+ return NULL;
+}
+
+
+// Iterates the elements in every lol and returns the one that matches the
+// specified key. If no matching element is found, then it returns NULL.
+template <typename T>
+inline LiveObjectList::Element*
+LiveObjectList::FindElementFor(T (*GetValue)(LiveObjectList::Element*), T key) {
+ LiveObjectList *lol = last();
+ while (lol != NULL) {
+ Element* elements = lol->elements_;
+ for (int i = 0; i < lol->obj_count_; i++) {
+ Element* element = &elements[i];
+ if (GetValue(element) == key) {
+ return element;
+ }
+ }
+ lol = lol->prev_;
+ }
+ return NULL;
+}
+
+
+inline int LiveObjectList::GetElementId(LiveObjectList::Element* element) {
+ return element->id_;
+}
+
+
+inline HeapObject*
+LiveObjectList::GetElementObj(LiveObjectList::Element* element) {
+ return element->obj_;
+}
+
+#endif // LIVE_OBJECT_LIST
+
+} } // namespace v8::internal
+
#endif // V8_LIVEOBJECTLIST_INL_H_
diff --git a/src/liveobjectlist.cc b/src/liveobjectlist.cc
index 28a3d6d..cd6fcf9 100644
--- a/src/liveobjectlist.cc
+++ b/src/liveobjectlist.cc
@@ -37,7 +37,7 @@
#include "heap.h"
#include "inspector.h"
#include "list-inl.h"
-#include "liveobjectlist.h"
+#include "liveobjectlist-inl.h"
#include "string-stream.h"
#include "top.h"
#include "v8utils.h"
@@ -46,6 +46,2480 @@
namespace internal {
+typedef int (*RawComparer)(const void*, const void*);
+
+
+#ifdef CHECK_ALL_OBJECT_TYPES
+
+#define DEBUG_LIVE_OBJECT_TYPES(v) \
+ v(Smi, "unexpected: Smi") \
+ \
+ v(CodeCache, "unexpected: CodeCache") \
+ v(BreakPointInfo, "unexpected: BreakPointInfo") \
+ v(DebugInfo, "unexpected: DebugInfo") \
+ v(TypeSwitchInfo, "unexpected: TypeSwitchInfo") \
+ v(SignatureInfo, "unexpected: SignatureInfo") \
+ v(Script, "unexpected: Script") \
+ v(ObjectTemplateInfo, "unexpected: ObjectTemplateInfo") \
+ v(FunctionTemplateInfo, "unexpected: FunctionTemplateInfo") \
+ v(CallHandlerInfo, "unexpected: CallHandlerInfo") \
+ v(InterceptorInfo, "unexpected: InterceptorInfo") \
+ v(AccessCheckInfo, "unexpected: AccessCheckInfo") \
+ v(AccessorInfo, "unexpected: AccessorInfo") \
+ v(ExternalTwoByteString, "unexpected: ExternalTwoByteString") \
+ v(ExternalAsciiString, "unexpected: ExternalAsciiString") \
+ v(ExternalString, "unexpected: ExternalString") \
+ v(SeqTwoByteString, "unexpected: SeqTwoByteString") \
+ v(SeqAsciiString, "unexpected: SeqAsciiString") \
+ v(SeqString, "unexpected: SeqString") \
+ v(JSFunctionResultCache, "unexpected: JSFunctionResultCache") \
+ v(GlobalContext, "unexpected: GlobalContext") \
+ v(MapCache, "unexpected: MapCache") \
+ v(CodeCacheHashTable, "unexpected: CodeCacheHashTable") \
+ v(CompilationCacheTable, "unexpected: CompilationCacheTable") \
+ v(SymbolTable, "unexpected: SymbolTable") \
+ v(Dictionary, "unexpected: Dictionary") \
+ v(HashTable, "unexpected: HashTable") \
+ v(DescriptorArray, "unexpected: DescriptorArray") \
+ v(ExternalFloatArray, "unexpected: ExternalFloatArray") \
+ v(ExternalUnsignedIntArray, "unexpected: ExternalUnsignedIntArray") \
+ v(ExternalIntArray, "unexpected: ExternalIntArray") \
+ v(ExternalUnsignedShortArray, "unexpected: ExternalUnsignedShortArray") \
+ v(ExternalShortArray, "unexpected: ExternalShortArray") \
+ v(ExternalUnsignedByteArray, "unexpected: ExternalUnsignedByteArray") \
+ v(ExternalByteArray, "unexpected: ExternalByteArray") \
+ v(JSValue, "unexpected: JSValue")
+
+#else
+#define DEBUG_LIVE_OBJECT_TYPES(v)
+#endif
+
+
+#define FOR_EACH_LIVE_OBJECT_TYPE(v) \
+ DEBUG_LIVE_OBJECT_TYPES(v) \
+ \
+ v(JSArray, "JSArray") \
+ v(JSRegExp, "JSRegExp") \
+ v(JSFunction, "JSFunction") \
+ v(JSGlobalObject, "JSGlobal") \
+ v(JSBuiltinsObject, "JSBuiltins") \
+ v(GlobalObject, "Global") \
+ v(JSGlobalProxy, "JSGlobalProxy") \
+ v(JSObject, "JSObject") \
+ \
+ v(Context, "meta: Context") \
+ v(ByteArray, "meta: ByteArray") \
+ v(PixelArray, "meta: PixelArray") \
+ v(ExternalArray, "meta: ExternalArray") \
+ v(FixedArray, "meta: FixedArray") \
+ v(String, "String") \
+ v(HeapNumber, "HeapNumber") \
+ \
+ v(Code, "meta: Code") \
+ v(Map, "meta: Map") \
+ v(Oddball, "Oddball") \
+ v(Proxy, "meta: Proxy") \
+ v(SharedFunctionInfo, "meta: SharedFunctionInfo") \
+ v(Struct, "meta: Struct") \
+ \
+ v(HeapObject, "HeapObject")
+
+
+enum /* LiveObjectType */ {
+#define DECLARE_OBJECT_TYPE_ENUM(type, name) kType##type,
+ FOR_EACH_LIVE_OBJECT_TYPE(DECLARE_OBJECT_TYPE_ENUM)
+ kInvalidLiveObjType,
+ kNumberOfTypes
+#undef DECLARE_OBJECT_TYPE_ENUM
+};
+
+
+LiveObjectType GetObjectType(HeapObject* heap_obj) {
+ // TODO(mlam): investigate using Map::instance_type() instead.
+#define CHECK_FOR_OBJECT_TYPE(type, name) \
+ if (heap_obj->Is##type()) return kType##type;
+ FOR_EACH_LIVE_OBJECT_TYPE(CHECK_FOR_OBJECT_TYPE)
+#undef CHECK_FOR_OBJECT_TYPE
+
+ UNREACHABLE();
+ return kInvalidLiveObjType;
+}
+
+
+inline const char* GetObjectTypeDesc(LiveObjectType type) {
+ static const char* const name[kNumberOfTypes] = {
+ #define DEFINE_OBJECT_TYPE_NAME(type, name) name,
+ FOR_EACH_LIVE_OBJECT_TYPE(DEFINE_OBJECT_TYPE_NAME)
+ "invalid"
+ #undef DEFINE_OBJECT_TYPE_NAME
+ };
+ ASSERT(type < kNumberOfTypes);
+ return name[type];
+}
+
+
+const char* GetObjectTypeDesc(HeapObject* heap_obj) {
+ LiveObjectType type = GetObjectType(heap_obj);
+ return GetObjectTypeDesc(type);
+}
+
+
+bool IsOfType(LiveObjectType type, HeapObject *obj) {
+ // Note: there are types that are more general (e.g. JSObject) that would
+ // have passed the Is##type_() test for more specialized types (e.g.
+ // JSFunction). If we find a more specialized match but we're looking for
+ // the general type, then we should reject the ones that match the
+ // specialized type.
+#define CHECK_OBJECT_TYPE(type_, name) \
+ if (obj->Is##type_()) return (type == kType##type_);
+
+ FOR_EACH_LIVE_OBJECT_TYPE(CHECK_OBJECT_TYPE)
+#undef CHECK_OBJECT_TYPE
+
+ return false;
+}
+
+
+const AllocationSpace kInvalidSpace = static_cast<AllocationSpace>(-1);
+
+static AllocationSpace FindSpaceFor(String* space_str) {
+ SmartPointer<char> s =
+ space_str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+
+ const char* key_str = *s;
+ switch (key_str[0]) {
+ case 'c':
+ if (strcmp(key_str, "cell") == 0) return CELL_SPACE;
+ if (strcmp(key_str, "code") == 0) return CODE_SPACE;
+ break;
+ case 'l':
+ if (strcmp(key_str, "lo") == 0) return LO_SPACE;
+ break;
+ case 'm':
+ if (strcmp(key_str, "map") == 0) return MAP_SPACE;
+ break;
+ case 'n':
+ if (strcmp(key_str, "new") == 0) return NEW_SPACE;
+ break;
+ case 'o':
+ if (strcmp(key_str, "old-pointer") == 0) return OLD_POINTER_SPACE;
+ if (strcmp(key_str, "old-data") == 0) return OLD_DATA_SPACE;
+ break;
+ }
+ return kInvalidSpace;
+}
+
+
+static bool InSpace(AllocationSpace space, HeapObject *heap_obj) {
+ if (space != LO_SPACE) {
+ return Heap::InSpace(heap_obj, space);
+ }
+
+ // This is an optimization to speed up the check for an object in the LO
+ // space by exclusion because we know that all object pointers passed in
+ // here are guaranteed to be in the heap. Hence, it is safe to infer
+ // using an exclusion test.
+ // Note: calling Heap::InSpace(heap_obj, LO_SPACE) is too slow for our
+ // filters.
+ int first_space = static_cast<int>(FIRST_SPACE);
+ int last_space = static_cast<int>(LO_SPACE);
+ for (int sp = first_space; sp < last_space; sp++) {
+ if (Heap::InSpace(heap_obj, static_cast<AllocationSpace>(sp))) {
+ return false;
+ }
+ }
+ SLOW_ASSERT(Heap::InSpace(heap_obj, LO_SPACE));
+ return true;
+}
+
+
+static LiveObjectType FindTypeFor(String* type_str) {
+ SmartPointer<char> s =
+ type_str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+
+#define CHECK_OBJECT_TYPE(type_, name) { \
+ const char* type_desc = GetObjectTypeDesc(kType##type_); \
+ const char* key_str = *s; \
+ if (strstr(type_desc, key_str) != NULL) return kType##type_; \
+ }
+ FOR_EACH_LIVE_OBJECT_TYPE(CHECK_OBJECT_TYPE)
+#undef CHECK_OBJECT_TYPE
+
+ return kInvalidLiveObjType;
+}
+
+
+class LolFilter {
+ public:
+ explicit LolFilter(Handle<JSObject> filter_obj);
+
+ inline bool is_active() const { return is_active_; }
+ inline bool Matches(HeapObject* obj) {
+ return !is_active() || MatchesSlow(obj);
+ }
+
+ private:
+ void InitTypeFilter(Handle<JSObject> filter_obj);
+ void InitSpaceFilter(Handle<JSObject> filter_obj);
+ void InitPropertyFilter(Handle<JSObject> filter_obj);
+ bool MatchesSlow(HeapObject* obj);
+
+ bool is_active_;
+ LiveObjectType type_;
+ AllocationSpace space_;
+ Handle<String> prop_;
+};
+
+
+LolFilter::LolFilter(Handle<JSObject> filter_obj)
+ : is_active_(false),
+ type_(kInvalidLiveObjType),
+ space_(kInvalidSpace),
+ prop_() {
+ if (filter_obj.is_null()) return;
+
+ InitTypeFilter(filter_obj);
+ InitSpaceFilter(filter_obj);
+ InitPropertyFilter(filter_obj);
+}
+
+
+void LolFilter::InitTypeFilter(Handle<JSObject> filter_obj) {
+ Handle<String> type_sym = Factory::LookupAsciiSymbol("type");
+ MaybeObject* maybe_result = filter_obj->GetProperty(*type_sym);
+ Object* type_obj;
+ if (maybe_result->ToObject(&type_obj)) {
+ if (type_obj->IsString()) {
+ String* type_str = String::cast(type_obj);
+ type_ = FindTypeFor(type_str);
+ if (type_ != kInvalidLiveObjType) {
+ is_active_ = true;
+ }
+ }
+ }
+}
+
+
+// Reads the "space" property of the filter object. If it names a known
+// allocation space, the space criterion is enabled; otherwise it is
+// silently ignored.
+void LolFilter::InitSpaceFilter(Handle<JSObject> filter_obj) {
+  Handle<String> space_sym = Factory::LookupAsciiSymbol("space");
+  MaybeObject* maybe_result = filter_obj->GetProperty(*space_sym);
+  Object* space_obj;
+  if (maybe_result->ToObject(&space_obj)) {
+    if (space_obj->IsString()) {
+      String* space_str = String::cast(space_obj);
+      space_ = FindSpaceFor(space_str);
+      if (space_ != kInvalidSpace) {
+        is_active_ = true;
+      }
+    }
+  }
+}
+
+
+// Reads the "prop" property of the filter object. If it is a string,
+// only objects possessing a property of that name will match.
+void LolFilter::InitPropertyFilter(Handle<JSObject> filter_obj) {
+  Handle<String> prop_sym = Factory::LookupAsciiSymbol("prop");
+  MaybeObject* maybe_result = filter_obj->GetProperty(*prop_sym);
+  Object* prop_obj;
+  if (maybe_result->ToObject(&prop_obj)) {
+    if (prop_obj->IsString()) {
+      prop_ = Handle<String>(String::cast(prop_obj));
+      is_active_ = true;
+    }
+  }
+}
+
+
+// Applies all enabled criteria to obj. All enabled criteria must match
+// for the object to pass the filter.
+bool LolFilter::MatchesSlow(HeapObject* obj) {
+  if ((type_ != kInvalidLiveObjType) && !IsOfType(type_, obj)) {
+    return false;  // Fail because obj is not of the type of interest.
+  }
+  if ((space_ != kInvalidSpace) && !InSpace(space_, obj)) {
+    return false;  // Fail because obj is not in the space of interest.
+  }
+  if (!prop_.is_null() && obj->IsJSObject()) {
+    LookupResult result;
+    obj->Lookup(*prop_, &result);
+    if (!result.IsProperty()) {
+      return false;  // Fail because obj does not have the property of interest.
+    }
+  }
+  return true;
+}
+
+
+// Iterates over all object elements in the chain of lols, walking
+// backwards from the newer lol towards (but excluding) the older lol.
+// Lols with no elements are skipped transparently.
+class LolIterator {
+ public:
+  // older and newer bound the iteration range; either may be NULL.
+  LolIterator(LiveObjectList* older, LiveObjectList* newer)
+      : older_(older),
+        newer_(newer),
+        curr_(0),
+        elements_(0),
+        count_(0),
+        index_(0) { }
+
+  inline void Init() {
+    SetCurrent(newer_);
+    // If the elements_ list is empty, then move on to the next list as long
+    // as we're not at the last list (indicated by Done()).
+    while ((elements_ == NULL) && !Done()) {
+      SetCurrent(curr_->prev_);
+    }
+  }
+
+  inline bool Done() const {
+    return (curr_ == older_);
+  }
+
+  // Object level iteration.
+  inline void Next() {
+    index_++;
+    if (index_ >= count_) {
+      // Iterate backwards until we get to the oldest list.
+      while (!Done()) {
+        SetCurrent(curr_->prev_);
+        // If we have elements to process, we're good to go.
+        if (elements_ != NULL) break;
+
+        // Else, we should advance to the next older list.
+      }
+    }
+  }
+
+  // Accessors for the current element; only valid while !Done().
+  inline int Id() const {
+    return elements_[index_].id_;
+  }
+  inline HeapObject* Obj() const {
+    return elements_[index_].obj_;
+  }
+
+  // Number of objects tracked by the current lol alone (not the chain).
+  inline int LolObjCount() const {
+    if (curr_ != NULL) return curr_->obj_count_;
+    return 0;
+  }
+
+ protected:
+  // Makes new_curr the current lol and resets the element cursor to the
+  // start of its elements.
+  inline void SetCurrent(LiveObjectList* new_curr) {
+    curr_ = new_curr;
+    if (curr_ != NULL) {
+      elements_ = curr_->elements_;
+      count_ = curr_->obj_count_;
+      index_ = 0;
+    }
+  }
+
+  LiveObjectList* older_;
+  LiveObjectList* newer_;
+  LiveObjectList* curr_;
+  LiveObjectList::Element* elements_;
+  int count_;
+  int index_;
+};
+
+
+// Like LolIterator, but walks the chain forwards (oldest to newest) by
+// following next_ pointers instead of prev_ pointers.
+// NOTE(review): the base-class constructor parameters are (older, newer)
+// but this class's are named (first, last) and forwarded in the same
+// order, so older_ holds "first" and newer_ holds "last" — confirm
+// callers pass the oldest lol first.
+class LolForwardIterator : public LolIterator {
+ public:
+  LolForwardIterator(LiveObjectList* first, LiveObjectList* last)
+      : LolIterator(first, last) {
+  }
+
+  inline void Init() {
+    SetCurrent(older_);
+    // If the elements_ list is empty, then move on to the next list as long
+    // as we're not at the last list (indicated by Done()).
+    while ((elements_ == NULL) && !Done()) {
+      SetCurrent(curr_->next_);
+    }
+  }
+
+  inline bool Done() const {
+    return (curr_ == newer_);
+  }
+
+  // Object level iteration.
+  inline void Next() {
+    index_++;
+    if (index_ >= count_) {
+      // Done with current list. Move on to the next.
+      while (!Done()) {  // If not at the last list already, ...
+        SetCurrent(curr_->next_);
+        // If we have elements to process, we're good to go.
+        if (elements_ != NULL) break;
+
+        // Else, we should advance to the next list.
+      }
+    }
+  }
+};
+
+
+// Minimizes the white space in a string. Runs of whitespace chars (tabs,
+// newlines, spaces) are collapsed into a single ' ', and non-ascii chars
+// are replaced with '?'. The string is compacted in place and the new
+// length (excluding the terminating '\0') is returned.
+static int CompactString(char* str) {
+  char* src = str;
+  char* dst = str;
+  char prev_ch = 0;
+  // Terminate on *src, not *dst: once whitespace has been compacted, dst
+  // lags behind src and *dst still holds a stale source char. Testing
+  // *dst could copy the source terminator into the output and count it
+  // in the returned length.
+  while (*src != '\0') {
+    char ch = *src++;
+    // We will treat non-ascii chars as '?'.
+    if ((ch & 0x80) != 0) {
+      ch = '?';
+    }
+    // Compact contiguous whitespace chars into a single ' '.
+    if (isspace(ch)) {
+      if (prev_ch != ' ') *dst++ = ' ';
+      prev_ch = ' ';
+      continue;
+    }
+    *dst++ = ch;
+    prev_ch = ch;
+  }
+  *dst = '\0';  // The compacted string is shorter; re-terminate it.
+  return (dst - str);
+}
+
+
+// Generates a custom description based on the specific type of
+// object we're looking at. We only generate specialized
+// descriptions where we can. In all other cases, we emit the
+// generic info.
+// The description is written into buffer (at most buffer_size chars,
+// truncated by OS::SNPrintF if necessary).
+static void GenerateObjectDesc(HeapObject* obj,
+                               char* buffer,
+                               int buffer_size) {
+  Vector<char> buffer_v(buffer, buffer_size);
+  ASSERT(obj != NULL);
+  if (obj->IsJSArray()) {
+    JSArray* jsarray = JSArray::cast(obj);
+    double length = jsarray->length()->Number();
+    OS::SNPrintF(buffer_v,
+                 "%p <%s> len %g",
+                 reinterpret_cast<void*>(obj),
+                 GetObjectTypeDesc(obj),
+                 length);
+
+  } else if (obj->IsString()) {
+    String *str = String::cast(obj);
+    // Only grab up to 160 chars in case they are double byte.
+    // We'll only dump 80 of them after we compact them.
+    const int kMaxCharToDump = 80;
+    const int kMaxBufferSize = kMaxCharToDump * 2;
+    SmartPointer<char> str_sp = str->ToCString(DISALLOW_NULLS,
+                                               ROBUST_STRING_TRAVERSAL,
+                                               0,
+                                               kMaxBufferSize);
+    char* str_cstr = *str_sp;
+    int length = CompactString(str_cstr);
+    // %.80s truncates the compacted string; "..." marks the truncation.
+    OS::SNPrintF(buffer_v,
+                 "%p <%s> '%.80s%s'",
+                 reinterpret_cast<void*>(obj),
+                 GetObjectTypeDesc(obj),
+                 str_cstr,
+                 (length > kMaxCharToDump) ? "..." : "");
+
+  } else if (obj->IsJSFunction() || obj->IsSharedFunctionInfo()) {
+    SharedFunctionInfo* sinfo;
+    if (obj->IsJSFunction()) {
+      JSFunction* func = JSFunction::cast(obj);
+      sinfo = func->shared();
+    } else {
+      sinfo = SharedFunctionInfo::cast(obj);
+    }
+
+    String* name = sinfo->DebugName();
+    SmartPointer<char> name_sp =
+        name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+    char* name_cstr = *name_sp;
+
+    HeapStringAllocator string_allocator;
+    StringStream stream(&string_allocator);
+    sinfo->SourceCodePrint(&stream, 50);
+    SmartPointer<const char> source_sp = stream.ToCString();
+    const char* source_cstr = *source_sp;
+
+    OS::SNPrintF(buffer_v,
+                 "%p <%s> '%s' %s",
+                 reinterpret_cast<void*>(obj),
+                 GetObjectTypeDesc(obj),
+                 name_cstr,
+                 source_cstr);
+
+  } else if (obj->IsFixedArray()) {
+    FixedArray* fixed = FixedArray::cast(obj);
+
+    OS::SNPrintF(buffer_v,
+                 "%p <%s> len %d",
+                 reinterpret_cast<void*>(obj),
+                 GetObjectTypeDesc(obj),
+                 fixed->length());
+
+  } else {
+    // Generic fallback: just the address and type description.
+    OS::SNPrintF(buffer_v,
+                 "%p <%s>",
+                 reinterpret_cast<void*>(obj),
+                 GetObjectTypeDesc(obj));
+  }
+}
+
+
+// Utility function for filling in a line of detail in a verbose dump.
+// Allocates a JS object with "id", "desc", and "size" properties and
+// stores it at arr[index]. If desc_str is NULL, the description and
+// size are derived from *target instead.
+// NOTE(review): detail, desc, and error are Handles passed by value, so
+// the assignments to them below are not visible to the caller — in
+// particular an allocation failure is never propagated back through
+// error. Confirm whether these were meant to be out parameters.
+static bool AddObjDetail(Handle<FixedArray> arr,
+                         int index,
+                         int obj_id,
+                         Handle<HeapObject> target,
+                         const char* desc_str,
+                         Handle<String> id_sym,
+                         Handle<String> desc_sym,
+                         Handle<String> size_sym,
+                         Handle<JSObject> detail,
+                         Handle<String> desc,
+                         Handle<Object> error) {
+  detail = Factory::NewJSObject(Top::object_function());
+  if (detail->IsFailure()) {
+    error = detail;
+    return false;
+  }
+
+  int size = 0;
+  char buffer[512];
+  if (desc_str == NULL) {
+    // No caller-supplied description: generate one from the target.
+    ASSERT(!target.is_null());
+    HeapObject* obj = *target;
+    GenerateObjectDesc(obj, buffer, sizeof(buffer));
+    desc_str = buffer;
+    size = obj->Size();
+  }
+  desc = Factory::NewStringFromAscii(CStrVector(desc_str));
+  if (desc->IsFailure()) {
+    error = desc;
+    return false;
+  }
+
+  { MaybeObject* maybe_result =
+        detail->SetProperty(*id_sym, Smi::FromInt(obj_id), NONE);
+    if (maybe_result->IsFailure()) return false;
+  }
+  { MaybeObject* maybe_result =
+        detail->SetProperty(*desc_sym, *desc, NONE);
+    if (maybe_result->IsFailure()) return false;
+  }
+  { MaybeObject* maybe_result =
+        detail->SetProperty(*size_sym, Smi::FromInt(size), NONE);
+    if (maybe_result->IsFailure()) return false;
+  }
+
+  arr->set(index, *detail);
+  return true;
+}
+
+
+// Abstraction for producing a verbose object dump. Concrete writers
+// supply the iteration source (a lol diff or a retainers walk).
+class DumpWriter {
+ public:
+  virtual ~DumpWriter() {}
+
+  // Computes the total number (and, if supported, aggregate size) of
+  // objects that pass the filter.
+  virtual void ComputeTotalCountAndSize(LolFilter* filter,
+                                        int* count,
+                                        int* size) = 0;
+  // Fills elements_arr with up to dump_limit detail records starting at
+  // logical index start. Returns false on failure.
+  virtual bool Write(Handle<FixedArray> elements_arr,
+                     int start,
+                     int dump_limit,
+                     LolFilter* filter,
+                     Handle<Object> error) = 0;
+};
+
+
+// DumpWriter over the objects between two lols (a lol diff).
+class LolDumpWriter: public DumpWriter {
+ public:
+  LolDumpWriter(LiveObjectList* older, LiveObjectList* newer)
+      : older_(older), newer_(newer) {
+  }
+
+  // Counts the filtered objects and sums their sizes.
+  void ComputeTotalCountAndSize(LolFilter* filter, int* count, int* size) {
+    *count = 0;
+    *size = 0;
+
+    LolIterator it(older_, newer_);
+    for (it.Init(); !it.Done(); it.Next()) {
+      HeapObject* heap_obj = it.Obj();
+      if (!filter->Matches(heap_obj)) {
+        continue;
+      }
+
+      *size += heap_obj->Size();
+      (*count)++;
+    }
+  }
+
+  // Writes detail records for the filtered objects, in oldest-to-newest
+  // order, skipping the first start matches and stopping at dump_limit.
+  bool Write(Handle<FixedArray> elements_arr,
+             int start,
+             int dump_limit,
+             LolFilter* filter,
+             Handle<Object> error) {
+    // The lols are listed in latest to earliest. We want to dump from
+    // earliest to latest. So, compute the last element to start with.
+    int index = 0;
+    int count = 0;
+
+    // Prefetch some needed symbols.
+    Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
+    Handle<String> desc_sym = Factory::LookupAsciiSymbol("desc");
+    Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+
+    // Fill the array with the lol object details.
+    Handle<JSObject> detail;
+    Handle<String> desc;
+    Handle<HeapObject> target;
+
+    // The forward iteration starts one lol past older_ (older_ itself is
+    // excluded from a diff) and ends one lol past newer_.
+    LiveObjectList* first_lol = (older_ != NULL) ?
+                                older_->next_ : LiveObjectList::first_;
+    LiveObjectList* last_lol = (newer_ != NULL) ? newer_->next_ : NULL;
+
+    LolForwardIterator it(first_lol, last_lol);
+    for (it.Init(); !it.Done() && (index < dump_limit); it.Next()) {
+      HeapObject* heap_obj = it.Obj();
+
+      // Skip objects that have been filtered out.
+      if (!filter->Matches(heap_obj)) {
+        continue;
+      }
+
+      // Only report objects that are in the section of interest.
+      if (count >= start) {
+        target = Handle<HeapObject>(heap_obj);
+        bool success = AddObjDetail(elements_arr,
+                                    index++,
+                                    it.Id(),
+                                    target,
+                                    NULL,
+                                    id_sym,
+                                    desc_sym,
+                                    size_sym,
+                                    detail,
+                                    desc,
+                                    error);
+        if (!success) return false;
+      }
+      count++;
+    }
+    return true;
+  }
+
+ private:
+  LiveObjectList* older_;
+  LiveObjectList* newer_;
+};
+
+
+// DumpWriter over the retainers of a target object, delegating the
+// actual walk to LiveObjectList::GetRetainers().
+class RetainersDumpWriter: public DumpWriter {
+ public:
+  RetainersDumpWriter(Handle<HeapObject> target,
+                      Handle<JSObject> instance_filter,
+                      Handle<JSFunction> args_function)
+      : target_(target),
+        instance_filter_(instance_filter),
+        args_function_(args_function) {
+  }
+
+  // Counts the retainers of target_. Size is not computed for a
+  // retainers dump, hence the -1 sentinel.
+  void ComputeTotalCountAndSize(LolFilter* filter, int* count, int* size) {
+    Handle<FixedArray> retainers_arr;
+    Handle<Object> error;
+
+    *size = -1;
+    LiveObjectList::GetRetainers(target_,
+                                 instance_filter_,
+                                 retainers_arr,
+                                 0,
+                                 Smi::kMaxValue,
+                                 count,
+                                 filter,
+                                 NULL,
+                                 *args_function_,
+                                 error);
+  }
+
+  // Fills elements_arr with up to dump_limit retainer records starting
+  // at index start. Returns false if the retainers walk failed.
+  bool Write(Handle<FixedArray> elements_arr,
+             int start,
+             int dump_limit,
+             LolFilter* filter,
+             Handle<Object> error) {
+    int dummy;
+    int count;
+
+    // Fill the retainer objects.
+    count = LiveObjectList::GetRetainers(target_,
+                                         instance_filter_,
+                                         elements_arr,
+                                         start,
+                                         dump_limit,
+                                         &dummy,
+                                         filter,
+                                         NULL,
+                                         *args_function_,
+                                         error);
+    if (count < 0) {
+      return false;
+    }
+    return true;
+  }
+
+ private:
+  Handle<HeapObject> target_;
+  Handle<JSObject> instance_filter_;
+  Handle<JSFunction> args_function_;
+};
+
+
+// Accumulates per-type counts and sizes for a set of heap objects, plus
+// totals and flags recording whether a (weak) root was encountered.
+class LiveObjectSummary {
+ public:
+  explicit LiveObjectSummary(LolFilter* filter)
+      : total_count_(0),
+        total_size_(0),
+        found_root_(false),
+        found_weak_root_(false),
+        filter_(filter) {
+    memset(counts_, 0, sizeof(counts_[0]) * kNumberOfEntries);
+    memset(sizes_, 0, sizeof(sizes_[0]) * kNumberOfEntries);
+  }
+
+  // Accounts for heap_obj under its live object type.
+  void Add(HeapObject* heap_obj) {
+    int size = heap_obj->Size();
+    LiveObjectType type = GetObjectType(heap_obj);
+    ASSERT(type != kInvalidLiveObjType);
+    counts_[type]++;
+    sizes_[type] += size;
+    total_count_++;
+    total_size_ += size;
+  }
+
+  void set_found_root() { found_root_ = true; }
+  void set_found_weak_root() { found_weak_root_ = true; }
+
+  inline int Count(LiveObjectType type) {
+    return counts_[type];
+  }
+  inline int Size(LiveObjectType type) {
+    return sizes_[type];
+  }
+  inline int total_count() {
+    return total_count_;
+  }
+  inline int total_size() {
+    return total_size_;
+  }
+  inline bool found_root() {
+    return found_root_;
+  }
+  inline bool found_weak_root() {
+    return found_weak_root_;
+  }
+  // Number of types with a non-zero count.
+  int GetNumberOfEntries() {
+    int entries = 0;
+    for (int i = 0; i < kNumberOfEntries; i++) {
+      if (counts_[i]) entries++;
+    }
+    return entries;
+  }
+
+  inline LolFilter* filter() { return filter_; }
+
+  static const int kNumberOfEntries = kNumberOfTypes;
+
+ private:
+  int counts_[kNumberOfEntries];  // Per-type object counts.
+  int sizes_[kNumberOfEntries];   // Per-type aggregate sizes.
+  int total_count_;
+  int total_size_;
+  bool found_root_;
+  bool found_weak_root_;
+
+  LolFilter *filter_;  // Not owned.
+};
+
+
+// Abstraction for a summary writer. Concrete writers iterate over some
+// set of objects and fill in the given summary.
+class SummaryWriter {
+ public:
+  virtual ~SummaryWriter() {}
+  virtual void Write(LiveObjectSummary* summary) = 0;
+};
+
+
+// A summary writer for filling in a summary of lol lists and diffs.
+class LolSummaryWriter: public SummaryWriter {
+ public:
+  LolSummaryWriter(LiveObjectList *older_lol,
+                   LiveObjectList *newer_lol)
+      : older_(older_lol), newer_(newer_lol) {
+  }
+
+  // Adds every filtered object between older_ and newer_ to the summary.
+  void Write(LiveObjectSummary* summary) {
+    LolFilter* filter = summary->filter();
+
+    // Fill the summary with the lol object details.
+    LolIterator it(older_, newer_);
+    for (it.Init(); !it.Done(); it.Next()) {
+      HeapObject* heap_obj = it.Obj();
+      if (!filter->Matches(heap_obj)) {
+        continue;
+      }
+      summary->Add(heap_obj);
+    }
+  }
+
+ private:
+  LiveObjectList* older_;
+  LiveObjectList* newer_;
+};
+
+
+// A summary writer for filling in a retainers list. The retainers walk
+// itself (via GetRetainers) feeds the summary passed to it.
+class RetainersSummaryWriter: public SummaryWriter {
+ public:
+  RetainersSummaryWriter(Handle<HeapObject> target,
+                         Handle<JSObject> instance_filter,
+                         Handle<JSFunction> args_function)
+      : target_(target),
+        instance_filter_(instance_filter),
+        args_function_(args_function) {
+  }
+
+  void Write(LiveObjectSummary* summary) {
+    Handle<FixedArray> retainers_arr;
+    Handle<Object> error;
+    int dummy_total_count;
+    LiveObjectList::GetRetainers(target_,
+                                 instance_filter_,
+                                 retainers_arr,
+                                 0,
+                                 Smi::kMaxValue,
+                                 &dummy_total_count,
+                                 summary->filter(),
+                                 summary,
+                                 *args_function_,
+                                 error);
+  }
+
+ private:
+  Handle<HeapObject> target_;
+  Handle<JSObject> instance_filter_;
+  Handle<JSFunction> args_function_;
+};
+
+
+// Static state for the global chain of captured lols.
+uint32_t LiveObjectList::next_element_id_ = 1;  // Next element id to assign.
+int LiveObjectList::list_count_ = 0;            // Number of visible lols.
+int LiveObjectList::last_id_ = 0;               // Id of the latest lol.
+LiveObjectList* LiveObjectList::first_ = NULL;  // Oldest lol in the chain.
+LiveObjectList* LiveObjectList::last_ = NULL;   // Newest lol in the chain.
+
+
+// Creates a lol extending the given prev lol, able to track up to
+// capacity objects beyond those already tracked by the prev chain.
+LiveObjectList::LiveObjectList(LiveObjectList* prev, int capacity)
+    : prev_(prev),
+      next_(NULL),
+      capacity_(capacity),
+      obj_count_(0) {
+  elements_ = NewArray<Element>(capacity);
+  id_ = ++last_id_;
+
+  list_count_++;
+}
+
+
+// Note: deleting a lol also deletes its entire chain of older lols via
+// the recursive delete of prev_.
+LiveObjectList::~LiveObjectList() {
+  DeleteArray<Element>(elements_);
+  delete prev_;
+}
+
+
+// Returns the total number of objects tracked by this lol and its whole
+// prev chain. If size_p is non-NULL, also sums the objects' sizes into
+// *size_p.
+int LiveObjectList::GetTotalObjCountAndSize(int* size_p) {
+  int size = 0;
+  int count = 0;
+  LiveObjectList *lol = this;
+  do {
+    // Only compute total size if requested i.e. when size_p is not null.
+    if (size_p != NULL) {
+      Element* elements = lol->elements_;
+      for (int i = 0; i < lol->obj_count_; i++) {
+        HeapObject* heap_obj = elements[i].obj_;
+        size += heap_obj->Size();
+      }
+    }
+    count += lol->obj_count_;
+    lol = lol->prev_;
+  } while (lol != NULL);
+
+  if (size_p != NULL) {
+    *size_p = size;
+  }
+  return count;
+}
+
+
+// Adds an object to the lol.
+// Returns true if successful, else returns false.
+bool LiveObjectList::Add(HeapObject* obj) {
+  // If the object is already accounted for in the prev list which we inherit
+  // from, then no need to add it to this list.
+  if ((prev() != NULL) && (prev()->Find(obj) != NULL)) {
+    return true;
+  }
+  ASSERT(obj_count_ <= capacity_);
+  if (obj_count_ == capacity_) {
+    // The heap must have grown and we have more objects than capacity to store
+    // them.
+    return false;  // Fail this addition.
+  }
+  // Record the object with a fresh globally-unique element id.
+  Element& element = elements_[obj_count_++];
+  element.id_ = next_element_id_++;
+  element.obj_ = obj;
+  return true;
+}
+
+
+// Comparator used for sorting and searching the lol. Orders elements by
+// the raw address of the tracked object.
+int LiveObjectList::CompareElement(const Element* a, const Element* b) {
+  const HeapObject* obj1 = a->obj_;
+  const HeapObject* obj2 = b->obj_;
+  // For lol elements, it doesn't matter which comes first if 2 elements point
+  // to the same object (which gets culled later). Hence, we only care about
+  // the greater than / less than relationships.
+  return (obj1 > obj2) ? 1 : (obj1 == obj2) ? 0 : -1;
+}
+
+
+// Looks for the specified object in the lol, and returns its element if found.
+// Searches this lol and its whole prev chain; each lol must already be
+// sorted (see Sort()/SortAll()) for bsearch to be valid.
+LiveObjectList::Element* LiveObjectList::Find(HeapObject* obj) {
+  LiveObjectList* lol = this;
+  Element key;
+  Element* result = NULL;
+
+  key.obj_ = obj;
+  // Iterate through the chain of lol's to look for the object.
+  while ((result == NULL) && (lol != NULL)) {
+    result = reinterpret_cast<Element*>(
+        bsearch(&key, lol->elements_, lol->obj_count_,
+                sizeof(Element),
+                reinterpret_cast<RawComparer>(CompareElement)));
+    lol = lol->prev_;
+  }
+  return result;
+}
+
+
+// "Nullifies" (convert the HeapObject* into an SMI) so that it will get cleaned
+// up in the GCEpilogue, while preserving the sort order of the lol.
+// NOTE: the lols need to be already sorted before NullifyMostRecent() is
+// called.
+void LiveObjectList::NullifyMostRecent(HeapObject* obj) {
+  LiveObjectList* lol = last();
+  Element key;
+  Element* result = NULL;
+
+  key.obj_ = obj;
+  // Iterate through the chain of lol's to look for the object.
+  while (lol != NULL) {
+    result = reinterpret_cast<Element*>(
+        bsearch(&key, lol->elements_, lol->obj_count_,
+                sizeof(Element),
+                reinterpret_cast<RawComparer>(CompareElement)));
+    if (result != NULL) {
+      // Since there may be more than one (we are nullifying dup's after all),
+      // find the first in the current lol, and nullify that. The lol should
+      // be sorted already to make this easy (see the use of SortAll()).
+      int i = result - lol->elements_;
+
+      // NOTE: we sort the lol in increasing order. So, if an object has been
+      // "nullified" (its lowest bit will be cleared to make it look like an
+      // SMI), it would/should show up before the equivalent dups that have not
+      // yet been "nullified". Hence, we should be searching backwards for the
+      // first occurrence of a matching object and nullify that instance. This
+      // will ensure that we preserve the expected sorting order.
+      // The loop condition is i >= 0 so that element 0 is examined too;
+      // with i > 0, a matching first element would be skipped and a later
+      // duplicate nullified instead, breaking the sort-order guarantee.
+      for (i--; i >= 0; i--) {
+        Element* element = &lol->elements_[i];
+        HeapObject* curr_obj = element->obj_;
+        if (curr_obj != obj) {
+          break;  // No more matches. Let's move on.
+        }
+        result = element;  // Let this earlier match be the result.
+      }
+
+      // Nullify the object.
+      NullifyNonLivePointer(&result->obj_);
+      return;
+    }
+    lol = lol->prev_;
+  }
+}
+
+
+// Sorts the lol's elements by object address (see CompareElement).
+void LiveObjectList::Sort() {
+  if (obj_count_ > 0) {
+    Vector<Element> elements_v(elements_, obj_count_);
+    elements_v.Sort(CompareElement);
+  }
+}
+
+
+// Sorts all captured lols starting from the latest.
+void LiveObjectList::SortAll() {
+  LiveObjectList* lol = last();
+  while (lol != NULL) {
+    lol->Sort();
+    lol = lol->prev_;
+  }
+}
+
+
+// Counts the number of objects in the heap (excluding free list nodes).
+static int CountHeapObjects() {
+  int count = 0;
+  // Iterate over all the heap spaces and count the number of objects.
+  HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+  HeapObject* heap_obj = NULL;
+  while ((heap_obj = iterator.next()) != NULL) {
+    count++;
+  }
+  return count;
+}
+
+
+// Captures a current snapshot of all objects in the heap.
+// Returns a JS object { id, count, size } describing the new lol, or a
+// failure. The new lol tracks only objects not already tracked by the
+// previous lol chain; count/size in the result cover the whole heap.
+MaybeObject* LiveObjectList::Capture() {
+  HandleScope scope;
+
+  // Count the number of objects in the heap.
+  int total_count = CountHeapObjects();
+  int count = total_count;
+  int size = 0;
+
+  LiveObjectList* last_lol = last();
+  if (last_lol != NULL) {
+    // Objects already tracked by older lols don't need new capacity.
+    count -= last_lol->TotalObjCount();
+  }
+
+  LiveObjectList* lol;
+
+  // Create a lol large enough to track all the objects.
+  lol = new LiveObjectList(last_lol, count);
+  if (lol == NULL) {
+    return NULL;  // No memory to proceed.
+  }
+
+  // The HeapIterator needs to be in its own scope because it disables
+  // allocation, and we need allocate below.
+  {
+    // Iterate over all the heap spaces and add the objects.
+    HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+    HeapObject* heap_obj = NULL;
+    bool failed = false;
+    while (!failed && (heap_obj = iterator.next()) != NULL) {
+      failed = !lol->Add(heap_obj);
+      size += heap_obj->Size();
+    }
+    ASSERT(!failed);
+
+    lol->Sort();
+
+    // Add the current lol to the list of lols.
+    if (last_ != NULL) {
+      last_->next_ = lol;
+    } else {
+      first_ = lol;
+    }
+    last_ = lol;
+
+#ifdef VERIFY_LOL
+    if (FLAG_verify_lol) {
+      Verify(true);
+    }
+#endif
+  }
+
+  // Build the JS result object (may allocate, hence outside the scope
+  // of the HeapIterator above).
+  Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
+  Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
+  Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+
+  Handle<JSObject> result = Factory::NewJSObject(Top::object_function());
+  if (result->IsFailure()) return Object::cast(*result);
+
+  { MaybeObject* maybe_result =
+        result->SetProperty(*id_sym, Smi::FromInt(lol->id()), NONE);
+    if (maybe_result->IsFailure()) return maybe_result;
+  }
+  { MaybeObject* maybe_result =
+        result->SetProperty(*count_sym, Smi::FromInt(total_count), NONE);
+    if (maybe_result->IsFailure()) return maybe_result;
+  }
+  { MaybeObject* maybe_result =
+        result->SetProperty(*size_sym, Smi::FromInt(size), NONE);
+    if (maybe_result->IsFailure()) return maybe_result;
+  }
+
+  return *result;
+}
+
+
+// Delete doesn't actually delete an lol. It just marks it as invisible since
+// its contents are considered to be part of subsequent lists as well. The
+// only time we'll actually delete the lol is when we Reset() or if the lol is
+// invisible, and its element count reaches 0.
+// Returns false if no lol with the given id exists.
+bool LiveObjectList::Delete(int id) {
+  LiveObjectList *lol = last();
+  while (lol != NULL) {
+    if (lol->id() == id) {
+      break;
+    }
+    lol = lol->prev_;
+  }
+
+  // If no lol is found for this id, then we fail to delete.
+  if (lol == NULL) return false;
+
+  // Else, mark the lol as invisible i.e. id == 0.
+  lol->id_ = 0;
+  list_count_--;
+  ASSERT(list_count_ >= 0);
+  if (lol->obj_count_ == 0) {
+    // Empty and invisible: unlink the lol from the chain before freeing.
+    // Point the next lol's prev to this lol's prev.
+    LiveObjectList* next = lol->next_;
+    LiveObjectList* prev = lol->prev_;
+    // Point next's prev to prev.
+    if (next != NULL) {
+      next->prev_ = lol->prev_;
+    } else {
+      last_ = lol->prev_;
+    }
+    // Point prev's next to next.
+    if (prev != NULL) {
+      prev->next_ = lol->next_;
+    } else {
+      first_ = lol->next_;
+    }
+
+    // Clear the links so the destructor doesn't delete the prev chain.
+    lol->prev_ = NULL;
+    lol->next_ = NULL;
+
+    // Delete this now empty and invisible lol.
+    delete lol;
+  }
+
+  // Just in case we've marked everything invisible, then clean up completely.
+  if (list_count_ == 0) {
+    Reset();
+  }
+
+  return true;
+}
+
+
+// Dumps the objects allocated between the lols identified by older_id
+// and newer_id (swapping the two if given out of order), applying the
+// optional filter, starting at start_idx and emitting at most
+// dump_limit entries. An id of 0 means "unbounded" on that end.
+MaybeObject* LiveObjectList::Dump(int older_id,
+                                  int newer_id,
+                                  int start_idx,
+                                  int dump_limit,
+                                  Handle<JSObject> filter_obj) {
+  if ((older_id < 0) || (newer_id < 0) || (last() == NULL)) {
+    return Failure::Exception();  // Fail: 0 is not a valid lol id.
+  }
+  if (newer_id < older_id) {
+    // They are not in the expected order. Swap them.
+    int temp = older_id;
+    older_id = newer_id;
+    newer_id = temp;
+  }
+
+  LiveObjectList *newer_lol = FindLolForId(newer_id, last());
+  LiveObjectList *older_lol = FindLolForId(older_id, newer_lol);
+
+  // If the id is defined, and we can't find a LOL for it, then we have an
+  // invalid id.
+  if ((newer_id != 0) && (newer_lol == NULL)) {
+    return Failure::Exception();  // Fail: the newer lol id is invalid.
+  }
+  if ((older_id != 0) && (older_lol == NULL)) {
+    return Failure::Exception();  // Fail: the older lol id is invalid.
+  }
+
+  LolFilter filter(filter_obj);
+  LolDumpWriter writer(older_lol, newer_lol);
+  return DumpPrivate(&writer, start_idx, dump_limit, &filter);
+}
+
+
+// Shared dump driver: computes totals via the writer, fills an elements
+// array, and wraps everything in a JS result object of the shape
+// { count, size?, first_index, elements: [...] }.
+MaybeObject* LiveObjectList::DumpPrivate(DumpWriter* writer,
+                                         int start,
+                                         int dump_limit,
+                                         LolFilter* filter) {
+  HandleScope scope;
+
+  // Calculate the number of entries of the dump.
+  int count = -1;
+  int size = -1;
+  writer->ComputeTotalCountAndSize(filter, &count, &size);
+
+  // Adjust for where to start the dump.
+  if ((start < 0) || (start >= count)) {
+    return Failure::Exception();  // invalid start.
+  }
+
+  // Clamp the dump limit to the entries actually remaining after start.
+  int remaining_count = count - start;
+  if (dump_limit > remaining_count) {
+    dump_limit = remaining_count;
+  }
+
+  // Allocate an array to hold the result.
+  Handle<FixedArray> elements_arr = Factory::NewFixedArray(dump_limit);
+  if (elements_arr->IsFailure()) return Object::cast(*elements_arr);
+
+  // Fill in the dump.
+  Handle<Object> error;
+  bool success = writer->Write(elements_arr,
+                               start,
+                               dump_limit,
+                               filter,
+                               error);
+  if (!success) return Object::cast(*error);
+
+  MaybeObject* maybe_result;
+
+  // Allocate the result body.
+  Handle<JSObject> body = Factory::NewJSObject(Top::object_function());
+  if (body->IsFailure()) return Object::cast(*body);
+
+  // Set the updated body.count.
+  Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
+  maybe_result = body->SetProperty(*count_sym, Smi::FromInt(count), NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  // Set the updated body.size if appropriate (retainer dumps report -1).
+  if (size >= 0) {
+    Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+    maybe_result = body->SetProperty(*size_sym, Smi::FromInt(size), NONE);
+    if (maybe_result->IsFailure()) return maybe_result;
+  }
+
+  // Set body.first_index.
+  Handle<String> first_sym = Factory::LookupAsciiSymbol("first_index");
+  maybe_result = body->SetProperty(*first_sym, Smi::FromInt(start), NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  // Allocate the JSArray of the elements.
+  Handle<JSObject> elements = Factory::NewJSObject(Top::array_function());
+  if (elements->IsFailure()) return Object::cast(*elements);
+  Handle<JSArray>::cast(elements)->SetContent(*elements_arr);
+
+  // Set body.elements.
+  Handle<String> elements_sym = Factory::LookupAsciiSymbol("elements");
+  maybe_result = body->SetProperty(*elements_sym, *elements, NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  return *body;
+}
+
+
+// Summarizes the objects allocated between the lols identified by
+// older_id and newer_id (swapping the two if given out of order),
+// applying the optional filter. An id of 0 means "unbounded".
+MaybeObject* LiveObjectList::Summarize(int older_id,
+                                       int newer_id,
+                                       Handle<JSObject> filter_obj) {
+  if ((older_id < 0) || (newer_id < 0) || (last() == NULL)) {
+    return Failure::Exception();  // Fail: 0 is not a valid lol id.
+  }
+  if (newer_id < older_id) {
+    // They are not in the expected order. Swap them.
+    int temp = older_id;
+    older_id = newer_id;
+    newer_id = temp;
+  }
+
+  LiveObjectList *newer_lol = FindLolForId(newer_id, last());
+  LiveObjectList *older_lol = FindLolForId(older_id, newer_lol);
+
+  // If the id is defined, and we can't find a LOL for it, then we have an
+  // invalid id.
+  if ((newer_id != 0) && (newer_lol == NULL)) {
+    return Failure::Exception();  // Fail: the newer lol id is invalid.
+  }
+  if ((older_id != 0) && (older_lol == NULL)) {
+    return Failure::Exception();  // Fail: the older lol id is invalid.
+  }
+
+  LolFilter filter(filter_obj);
+  LolSummaryWriter writer(older_lol, newer_lol);
+  return SummarizePrivate(&writer, &filter, false);
+}
+
+
+// Creates a summary report for the debugger.
+// Note: the SummaryWriter takes care of iterating over objects and filling in
+// the summary.
+MaybeObject* LiveObjectList::SummarizePrivate(SummaryWriter* writer,
+                                              LolFilter* filter,
+                                              bool is_tracking_roots) {
+  HandleScope scope;
+  MaybeObject* maybe_result;
+
+  LiveObjectSummary summary(filter);
+  writer->Write(&summary);
+
+  // The result body will look like this:
+  // body: {
+  //   count: <total_count>,
+  //   size: <total_size>,
+  //   found_root: <boolean>,  // optional.
+  //   found_weak_root: <boolean>,  // optional.
+  //   summary: [
+  //     {
+  //       desc: "<object type name>",
+  //       count: <count>,
+  //       size: size
+  //     },
+  //     ...
+  //   ]
+  // }
+
+  // Prefetch some needed symbols.
+  Handle<String> desc_sym = Factory::LookupAsciiSymbol("desc");
+  Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
+  Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+  Handle<String> summary_sym = Factory::LookupAsciiSymbol("summary");
+
+  // Allocate the summary array, sized to the types actually present.
+  int entries_count = summary.GetNumberOfEntries();
+  Handle<FixedArray> summary_arr =
+      Factory::NewFixedArray(entries_count);
+  if (summary_arr->IsFailure()) return Object::cast(*summary_arr);
+
+  int idx = 0;
+  for (int i = 0; i < LiveObjectSummary::kNumberOfEntries; i++) {
+    // Allocate the summary record.
+    Handle<JSObject> detail = Factory::NewJSObject(Top::object_function());
+    if (detail->IsFailure()) return Object::cast(*detail);
+
+    // Fill in the summary record; types with a zero count are skipped.
+    LiveObjectType type = static_cast<LiveObjectType>(i);
+    int count = summary.Count(type);
+    if (count) {
+      const char* desc_cstr = GetObjectTypeDesc(type);
+      Handle<String> desc = Factory::LookupAsciiSymbol(desc_cstr);
+      int size = summary.Size(type);
+
+      maybe_result = detail->SetProperty(*desc_sym, *desc, NONE);
+      if (maybe_result->IsFailure()) return maybe_result;
+      maybe_result = detail->SetProperty(*count_sym, Smi::FromInt(count), NONE);
+      if (maybe_result->IsFailure()) return maybe_result;
+      maybe_result = detail->SetProperty(*size_sym, Smi::FromInt(size), NONE);
+      if (maybe_result->IsFailure()) return maybe_result;
+
+      summary_arr->set(idx++, *detail);
+    }
+  }
+
+  // Wrap the summary fixed array in a JS array.
+  Handle<JSObject> summary_obj = Factory::NewJSObject(Top::array_function());
+  if (summary_obj->IsFailure()) return Object::cast(*summary_obj);
+  Handle<JSArray>::cast(summary_obj)->SetContent(*summary_arr);
+
+  // Create the body object.
+  Handle<JSObject> body = Factory::NewJSObject(Top::object_function());
+  if (body->IsFailure()) return Object::cast(*body);
+
+  // Fill out the body object.
+  int total_count = summary.total_count();
+  int total_size = summary.total_size();
+  maybe_result =
+      body->SetProperty(*count_sym, Smi::FromInt(total_count), NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  maybe_result = body->SetProperty(*size_sym, Smi::FromInt(total_size), NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  // Root-tracking results are only reported for retainer summaries.
+  if (is_tracking_roots) {
+    int found_root = summary.found_root();
+    int found_weak_root = summary.found_weak_root();
+    Handle<String> root_sym = Factory::LookupAsciiSymbol("found_root");
+    Handle<String> weak_root_sym =
+        Factory::LookupAsciiSymbol("found_weak_root");
+    maybe_result =
+        body->SetProperty(*root_sym, Smi::FromInt(found_root), NONE);
+    if (maybe_result->IsFailure()) return maybe_result;
+    maybe_result =
+        body->SetProperty(*weak_root_sym, Smi::FromInt(found_weak_root), NONE);
+    if (maybe_result->IsFailure()) return maybe_result;
+  }
+
+  maybe_result = body->SetProperty(*summary_sym, *summary_obj, NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  return *body;
+}
+
+
+// Returns an array listing the captured lols.
+// Note: only dumps the section starting at start_idx and only up to
+// dump_limit entries. Invisible lols (id == 0, see Delete()) are
+// excluded from both the indexing and the output.
+MaybeObject* LiveObjectList::Info(int start_idx, int dump_limit) {
+  HandleScope scope;
+  MaybeObject* maybe_result;
+
+  int total_count = LiveObjectList::list_count();
+  int dump_count = total_count;
+
+  // Adjust for where to start the dump.
+  if (total_count == 0) {
+    start_idx = 0;  // Ensure this to get an empty list.
+  } else if ((start_idx < 0) || (start_idx >= total_count)) {
+    return Failure::Exception();  // invalid start.
+  }
+  dump_count -= start_idx;
+
+  // Adjust for the dump limit.
+  if (dump_count > dump_limit) {
+    dump_count = dump_limit;
+  }
+
+  // Allocate an array to hold the result.
+  Handle<FixedArray> list = Factory::NewFixedArray(dump_count);
+  if (list->IsFailure()) return Object::cast(*list);
+
+  // Prefetch some needed symbols.
+  Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
+  Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
+  Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+
+  // Fill the array with the lol details.
+  int idx = 0;
+  LiveObjectList* lol = first_;
+  while ((lol != NULL) && (idx < start_idx)) {  // Skip tail entries.
+    if (lol->id() != 0) {
+      idx++;
+    }
+    lol = lol->next();
+  }
+  idx = 0;
+  while ((lol != NULL) && (dump_limit != 0)) {
+    if (lol->id() != 0) {
+      int count;
+      int size;
+      count = lol->GetTotalObjCountAndSize(&size);
+
+      Handle<JSObject> detail = Factory::NewJSObject(Top::object_function());
+      if (detail->IsFailure()) return Object::cast(*detail);
+
+      maybe_result =
+          detail->SetProperty(*id_sym, Smi::FromInt(lol->id()), NONE);
+      if (maybe_result->IsFailure()) return maybe_result;
+      maybe_result =
+          detail->SetProperty(*count_sym, Smi::FromInt(count), NONE);
+      if (maybe_result->IsFailure()) return maybe_result;
+      maybe_result = detail->SetProperty(*size_sym, Smi::FromInt(size), NONE);
+      if (maybe_result->IsFailure()) return maybe_result;
+      list->set(idx++, *detail);
+      dump_limit--;
+    }
+    lol = lol->next();
+  }
+
+  // Return the result as a JS array.
+  Handle<JSObject> lols = Factory::NewJSObject(Top::array_function());
+  Handle<JSArray>::cast(lols)->SetContent(*list);
+
+  Handle<JSObject> result = Factory::NewJSObject(Top::object_function());
+  if (result->IsFailure()) return Object::cast(*result);
+
+  maybe_result =
+      result->SetProperty(*count_sym, Smi::FromInt(total_count), NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  Handle<String> first_sym = Factory::LookupAsciiSymbol("first_index");
+  maybe_result =
+      result->SetProperty(*first_sym, Smi::FromInt(start_idx), NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  Handle<String> lists_sym = Factory::LookupAsciiSymbol("lists");
+  maybe_result = result->SetProperty(*lists_sym, *lols, NONE);
+  if (maybe_result->IsFailure()) return maybe_result;
+
+  return *result;
+}
+
+
+// Deletes all captured lols.
+void LiveObjectList::Reset() {
+ LiveObjectList *lol = last();
+ // Just delete the last. Each lol will delete it's prev automatically.
+ delete lol;
+
+ next_element_id_ = 1;
+ list_count_ = 0;
+ last_id_ = 0;
+ first_ = NULL;
+ last_ = NULL;
+}
+
+
+// Gets the object for the specified obj id.
+Object* LiveObjectList::GetObj(int obj_id) {
+ Element* element = FindElementFor<int>(GetElementId, obj_id);
+ if (element != NULL) {
+ return Object::cast(element->obj_);
+ }
+ return Heap::undefined_value();
+}
+
+
+// Gets the obj id for the specified address if valid.
+int LiveObjectList::GetObjId(Object* obj) {
+ // Make a heap object pointer from the address.
+ HeapObject* hobj = HeapObject::cast(obj);
+ Element* element = FindElementFor<HeapObject*>(GetElementObj, hobj);
+ if (element != NULL) {
+ return element->id_;
+ }
+ return 0; // Invalid address.
+}
+
+
+// Gets the obj id for the specified address if valid.
+Object* LiveObjectList::GetObjId(Handle<String> address) {
+ SmartPointer<char> addr_str =
+ address->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+
+ // Extract the address value from the string.
+ int value = static_cast<int>(StringToInt(*address, 16));
+ Object* obj = reinterpret_cast<Object*>(value);
+ return Smi::FromInt(GetObjId(obj));
+}
+
+
+// Helper class for copying HeapObjects.
+class LolVisitor: public ObjectVisitor {
+ public:
+
+ LolVisitor(HeapObject* target, Handle<HeapObject> handle_to_skip)
+ : target_(target), handle_to_skip_(handle_to_skip), found_(false) {}
+
+ void VisitPointer(Object** p) { CheckPointer(p); }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Check all HeapObject pointers in [start, end).
+ for (Object** p = start; !found() && p < end; p++) CheckPointer(p);
+ }
+
+ inline bool found() const { return found_; }
+ inline bool reset() { return found_ = false; }
+
+ private:
+ inline void CheckPointer(Object** p) {
+ Object* object = *p;
+ if (HeapObject::cast(object) == target_) {
+ // We may want to skip this handle because the handle may be a local
+ // handle in a handle scope in one of our callers. Once we return,
+ // that handle will be popped. Hence, we don't want to count it as
+ // a root that would have kept the target object alive.
+ if (!handle_to_skip_.is_null() &&
+ handle_to_skip_.location() == reinterpret_cast<HeapObject**>(p)) {
+ return; // Skip this handle.
+ }
+ found_ = true;
+ }
+ }
+
+ HeapObject* target_;
+ Handle<HeapObject> handle_to_skip_;
+ bool found_;
+};
+
+
+inline bool AddRootRetainerIfFound(const LolVisitor& visitor,
+ LolFilter* filter,
+ LiveObjectSummary *summary,
+ void (*SetRootFound)(LiveObjectSummary *s),
+ int start,
+ int dump_limit,
+ int* total_count,
+ Handle<FixedArray> retainers_arr,
+ int* count,
+ int* index,
+ const char* root_name,
+ Handle<String> id_sym,
+ Handle<String> desc_sym,
+ Handle<String> size_sym,
+ Handle<Object> error) {
+ HandleScope scope;
+
+ // Scratch handles.
+ Handle<JSObject> detail;
+ Handle<String> desc;
+ Handle<HeapObject> retainer;
+
+ if (visitor.found()) {
+ if (!filter->is_active()) {
+ (*total_count)++;
+ if (summary) {
+ SetRootFound(summary);
+ } else if ((*total_count > start) && ((*index) < dump_limit)) {
+ (*count)++;
+ if (!retainers_arr.is_null()) {
+ return AddObjDetail(retainers_arr,
+ (*index)++,
+ 0,
+ retainer,
+ root_name,
+ id_sym,
+ desc_sym,
+ size_sym,
+ detail,
+ desc,
+ error);
+ }
+ }
+ }
+ }
+ return true;
+}
+
+
+inline void SetFoundRoot(LiveObjectSummary *summary) {
+ summary->set_found_root();
+}
+
+
+inline void SetFoundWeakRoot(LiveObjectSummary *summary) {
+ summary->set_found_weak_root();
+}
+
+
+int LiveObjectList::GetRetainers(Handle<HeapObject> target,
+ Handle<JSObject> instance_filter,
+ Handle<FixedArray> retainers_arr,
+ int start,
+ int dump_limit,
+ int* total_count,
+ LolFilter* filter,
+ LiveObjectSummary *summary,
+ JSFunction* arguments_function,
+ Handle<Object> error) {
+ HandleScope scope;
+
+ // Scratch handles.
+ Handle<JSObject> detail;
+ Handle<String> desc;
+ Handle<HeapObject> retainer;
+
+ // Prefetch some needed symbols.
+ Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
+ Handle<String> desc_sym = Factory::LookupAsciiSymbol("desc");
+ Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+
+ NoHandleAllocation ha;
+ int count = 0;
+ int index = 0;
+ Handle<JSObject> last_obj;
+
+ *total_count = 0;
+
+ // Iterate roots.
+ LolVisitor lol_visitor(*target, target);
+ Heap::IterateStrongRoots(&lol_visitor, VISIT_ALL);
+ if (!AddRootRetainerIfFound(lol_visitor,
+ filter,
+ summary,
+ SetFoundRoot,
+ start,
+ dump_limit,
+ total_count,
+ retainers_arr,
+ &count,
+ &index,
+ "<root>",
+ id_sym,
+ desc_sym,
+ size_sym,
+ error)) {
+ return -1;
+ }
+
+ lol_visitor.reset();
+ Heap::IterateWeakRoots(&lol_visitor, VISIT_ALL);
+ if (!AddRootRetainerIfFound(lol_visitor,
+ filter,
+ summary,
+ SetFoundWeakRoot,
+ start,
+ dump_limit,
+ total_count,
+ retainers_arr,
+ &count,
+ &index,
+ "<weak root>",
+ id_sym,
+ desc_sym,
+ size_sym,
+ error)) {
+ return -1;
+ }
+
+ // Iterate the live object lists.
+ LolIterator it(NULL, last());
+ for (it.Init(); !it.Done() && (index < dump_limit); it.Next()) {
+ HeapObject* heap_obj = it.Obj();
+
+ // Only look at all JSObjects.
+ if (heap_obj->IsJSObject()) {
+ // Skip context extension objects and argument arrays as these are
+ // checked in the context of functions using them.
+ JSObject* obj = JSObject::cast(heap_obj);
+ if (obj->IsJSContextExtensionObject() ||
+ obj->map()->constructor() == arguments_function) {
+ continue;
+ }
+
+ // Check if the JS object has a reference to the object looked for.
+ if (obj->ReferencesObject(*target)) {
+ // Check instance filter if supplied. This is normally used to avoid
+ // references from mirror objects (see Runtime_IsInPrototypeChain).
+ if (!instance_filter->IsUndefined()) {
+ Object* V = obj;
+ while (true) {
+ Object* prototype = V->GetPrototype();
+ if (prototype->IsNull()) {
+ break;
+ }
+ if (*instance_filter == prototype) {
+ obj = NULL; // Don't add this object.
+ break;
+ }
+ V = prototype;
+ }
+ }
+
+ if (obj != NULL) {
+ // Skip objects that have been filtered out.
+ if (filter->Matches(heap_obj)) {
+ continue;
+ }
+
+ // Valid reference found add to instance array if supplied an update
+ // count.
+ last_obj = Handle<JSObject>(obj);
+ (*total_count)++;
+
+ if (summary != NULL) {
+ summary->Add(heap_obj);
+ } else if ((*total_count > start) && (index < dump_limit)) {
+ count++;
+ if (!retainers_arr.is_null()) {
+ retainer = Handle<HeapObject>(heap_obj);
+ bool success = AddObjDetail(retainers_arr,
+ index++,
+ it.Id(),
+ retainer,
+ NULL,
+ id_sym,
+ desc_sym,
+ size_sym,
+ detail,
+ desc,
+ error);
+ if (!success) return -1;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Check for circular reference only. This can happen when the object is only
+ // referenced from mirrors and has a circular reference in which case the
+ // object is not really alive and would have been garbage collected if not
+ // referenced from the mirror.
+
+ if (*total_count == 1 && !last_obj.is_null() && *last_obj == *target) {
+ count = 0;
+ *total_count = 0;
+ }
+
+ return count;
+}
+
+
+MaybeObject* LiveObjectList::GetObjRetainers(int obj_id,
+ Handle<JSObject> instance_filter,
+ bool verbose,
+ int start,
+ int dump_limit,
+ Handle<JSObject> filter_obj) {
+ HandleScope scope;
+
+ // Get the target object.
+ HeapObject* heap_obj = HeapObject::cast(GetObj(obj_id));
+ if (heap_obj == Heap::undefined_value()) {
+ return heap_obj;
+ }
+
+ Handle<HeapObject> target = Handle<HeapObject>(heap_obj);
+
+ // Get the constructor function for context extension and arguments array.
+ JSObject* arguments_boilerplate =
+ Top::context()->global_context()->arguments_boilerplate();
+ JSFunction* arguments_function =
+ JSFunction::cast(arguments_boilerplate->map()->constructor());
+
+ Handle<JSFunction> args_function = Handle<JSFunction>(arguments_function);
+ LolFilter filter(filter_obj);
+
+ if (!verbose) {
+ RetainersSummaryWriter writer(target, instance_filter, args_function);
+ return SummarizePrivate(&writer, &filter, true);
+
+ } else {
+ RetainersDumpWriter writer(target, instance_filter, args_function);
+ Object* body_obj;
+ MaybeObject* maybe_result =
+ DumpPrivate(&writer, start, dump_limit, &filter);
+ if (!maybe_result->ToObject(&body_obj)) {
+ return maybe_result;
+ }
+
+ // Set body.id.
+ Handle<JSObject> body = Handle<JSObject>(JSObject::cast(body_obj));
+ Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
+ maybe_result = body->SetProperty(*id_sym, Smi::FromInt(obj_id), NONE);
+ if (maybe_result->IsFailure()) return maybe_result;
+
+ return *body;
+ }
+}
+
+
+Object* LiveObjectList::PrintObj(int obj_id) {
+ Object* obj = GetObj(obj_id);
+ if (!obj) {
+ return Heap::undefined_value();
+ }
+
+ EmbeddedVector<char, 128> temp_filename;
+ static int temp_count = 0;
+ const char* path_prefix = ".";
+
+ if (FLAG_lol_workdir) {
+ path_prefix = FLAG_lol_workdir;
+ }
+ OS::SNPrintF(temp_filename, "%s/lol-print-%d", path_prefix, ++temp_count);
+
+ FILE* f = OS::FOpen(temp_filename.start(), "w+");
+
+ PrintF(f, "@%d ", LiveObjectList::GetObjId(obj));
+#ifdef OBJECT_PRINT
+#ifdef INSPECTOR
+ Inspector::DumpObjectType(f, obj);
+#endif // INSPECTOR
+ PrintF(f, "\n");
+ obj->Print(f);
+#else // !OBJECT_PRINT
+ obj->ShortPrint(f);
+#endif // !OBJECT_PRINT
+ PrintF(f, "\n");
+ Flush(f);
+ fclose(f);
+
+ // Create a string from the temp_file.
+ // Note: the mmapped resource will take care of closing the file.
+ MemoryMappedExternalResource* resource =
+ new MemoryMappedExternalResource(temp_filename.start(), true);
+ if (resource->exists() && !resource->is_empty()) {
+ ASSERT(resource->IsAscii());
+ Handle<String> dump_string =
+ Factory::NewExternalStringFromAscii(resource);
+ ExternalStringTable::AddString(*dump_string);
+ return *dump_string;
+ } else {
+ delete resource;
+ }
+ return Heap::undefined_value();
+}
+
+
+class LolPathTracer: public PathTracer {
+ public:
+ LolPathTracer(FILE* out,
+ Object* search_target,
+ WhatToFind what_to_find)
+ : PathTracer(search_target, what_to_find, VISIT_ONLY_STRONG), out_(out) {}
+
+ private:
+ void ProcessResults();
+
+ FILE* out_;
+};
+
+
+void LolPathTracer::ProcessResults() {
+ if (found_target_) {
+ PrintF(out_, "=====================================\n");
+ PrintF(out_, "==== Path to object ====\n");
+ PrintF(out_, "=====================================\n\n");
+
+ ASSERT(!object_stack_.is_empty());
+ Object* prev = NULL;
+ for (int i = 0, index = 0; i < object_stack_.length(); i++) {
+ Object* obj = object_stack_[i];
+
+ // Skip this object if it is basically the internals of the
+ // previous object (which would have dumped its details already).
+ if (prev && prev->IsJSObject() &&
+ (obj != search_target_)) {
+ JSObject* jsobj = JSObject::cast(prev);
+ if (obj->IsFixedArray() &&
+ jsobj->properties() == FixedArray::cast(obj)) {
+ // Skip this one because it would have been printed as the
+ // properties of the last object already.
+ continue;
+ } else if (obj->IsHeapObject() &&
+ jsobj->elements() == HeapObject::cast(obj)) {
+ // Skip this one because it would have been printed as the
+ // elements of the last object already.
+ continue;
+ }
+ }
+
+ // Print a connecting arrow.
+ if (i > 0) PrintF(out_, "\n |\n |\n V\n\n");
+
+ // Print the object index.
+ PrintF(out_, "[%d] ", ++index);
+
+ // Print the LOL object ID:
+ int id = LiveObjectList::GetObjId(obj);
+ if (id > 0) PrintF(out_, "@%d ", id);
+
+#ifdef OBJECT_PRINT
+#ifdef INSPECTOR
+ Inspector::DumpObjectType(out_, obj);
+#endif // INSPECTOR
+ PrintF(out_, "\n");
+ obj->Print(out_);
+#else // !OBJECT_PRINT
+ obj->ShortPrint(out_);
+ PrintF(out_, "\n");
+#endif // !OBJECT_PRINT
+ Flush(out_);
+ }
+ PrintF(out_, "\n");
+ PrintF(out_, "=====================================\n\n");
+ Flush(out_);
+ }
+}
+
+
+Object* LiveObjectList::GetPathPrivate(HeapObject* obj1, HeapObject* obj2) {
+ EmbeddedVector<char, 128> temp_filename;
+ static int temp_count = 0;
+ const char* path_prefix = ".";
+
+ if (FLAG_lol_workdir) {
+ path_prefix = FLAG_lol_workdir;
+ }
+ OS::SNPrintF(temp_filename, "%s/lol-getpath-%d", path_prefix, ++temp_count);
+
+ FILE* f = OS::FOpen(temp_filename.start(), "w+");
+
+ // Save the previous verbosity.
+ bool prev_verbosity = FLAG_use_verbose_printer;
+ FLAG_use_verbose_printer = false;
+
+ // Dump the paths.
+ {
+ // The tracer needs to be scoped because its usage asserts no allocation,
+ // and we need to allocate the result string below.
+ LolPathTracer tracer(f, obj2, LolPathTracer::FIND_FIRST);
+
+ bool found = false;
+ if (obj1 == NULL) {
+ // Check for ObjectGroups that references this object.
+ // TODO(mlam): refactor this to be more modular.
+ {
+ List<ObjectGroup*>* groups = GlobalHandles::ObjectGroups();
+ for (int i = 0; i < groups->length(); i++) {
+ ObjectGroup* group = groups->at(i);
+ if (group == NULL) continue;
+
+ bool found_group = false;
+ List<Object**>& objects = group->objects_;
+ for (int j = 0; j < objects.length(); j++) {
+ Object* object = *objects[j];
+ HeapObject* hobj = HeapObject::cast(object);
+ if (obj2 == hobj) {
+ found_group = true;
+ break;
+ }
+ }
+
+ if (found_group) {
+ PrintF(f,
+ "obj %p is a member of object group %p {\n",
+ reinterpret_cast<void*>(obj2),
+ reinterpret_cast<void*>(group));
+ for (int j = 0; j < objects.length(); j++) {
+ Object* object = *objects[j];
+ if (!object->IsHeapObject()) continue;
+
+ HeapObject* hobj = HeapObject::cast(object);
+ int id = GetObjId(hobj);
+ if (id != 0) {
+ PrintF(f, " @%d:", id);
+ } else {
+ PrintF(f, " <no id>:");
+ }
+
+ char buffer[512];
+ GenerateObjectDesc(hobj, buffer, sizeof(buffer));
+ PrintF(f, " %s", buffer);
+ if (hobj == obj2) {
+ PrintF(f, " <===");
+ }
+ PrintF(f, "\n");
+ }
+ PrintF(f, "}\n");
+ }
+ }
+ }
+
+ PrintF(f, "path from roots to obj %p\n", reinterpret_cast<void*>(obj2));
+ Heap::IterateRoots(&tracer, VISIT_ONLY_STRONG);
+ found = tracer.found();
+
+ if (!found) {
+ PrintF(f, " No paths found. Checking symbol tables ...\n");
+ SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
+ tracer.VisitPointers(reinterpret_cast<Object**>(&symbol_table),
+ reinterpret_cast<Object**>(&symbol_table)+1);
+ found = tracer.found();
+ if (!found) {
+ symbol_table->IteratePrefix(&tracer);
+ found = tracer.found();
+ }
+ }
+
+ if (!found) {
+ PrintF(f, " No paths found. Checking weak roots ...\n");
+ // Check weak refs next.
+ GlobalHandles::IterateWeakRoots(&tracer);
+ found = tracer.found();
+ }
+
+ } else {
+ PrintF(f, "path from obj %p to obj %p:\n",
+ reinterpret_cast<void*>(obj1), reinterpret_cast<void*>(obj2));
+ tracer.TracePathFrom(reinterpret_cast<Object**>(&obj1));
+ found = tracer.found();
+ }
+
+ if (!found) {
+ PrintF(f, " No paths found\n\n");
+ }
+ }
+
+ // Flush and clean up the dumped file.
+ Flush(f);
+ fclose(f);
+
+ // Restore the previous verbosity.
+ FLAG_use_verbose_printer = prev_verbosity;
+
+ // Create a string from the temp_file.
+ // Note: the mmapped resource will take care of closing the file.
+ MemoryMappedExternalResource* resource =
+ new MemoryMappedExternalResource(temp_filename.start(), true);
+ if (resource->exists() && !resource->is_empty()) {
+ ASSERT(resource->IsAscii());
+ Handle<String> path_string =
+ Factory::NewExternalStringFromAscii(resource);
+ ExternalStringTable::AddString(*path_string);
+ return *path_string;
+ } else {
+ delete resource;
+ }
+ return Heap::undefined_value();
+}
+
+
+Object* LiveObjectList::GetPath(int obj_id1,
+ int obj_id2,
+ Handle<JSObject> instance_filter) {
+ HandleScope scope;
+
+ // Get the target object.
+ HeapObject* obj1 = NULL;
+ if (obj_id1 != 0) {
+ obj1 = HeapObject::cast(GetObj(obj_id1));
+ if (obj1 == Heap::undefined_value()) {
+ return obj1;
+ }
+ }
+
+ HeapObject* obj2 = HeapObject::cast(GetObj(obj_id2));
+ if (obj2 == Heap::undefined_value()) {
+ return obj2;
+ }
+
+ return GetPathPrivate(obj1, obj2);
+}
+
+
+void LiveObjectList::DoProcessNonLive(HeapObject *obj) {
+ // We should only be called if we have at least one lol to search.
+ ASSERT(last() != NULL);
+ Element* element = last()->Find(obj);
+ if (element != NULL) {
+ NullifyNonLivePointer(&element->obj_);
+ }
+}
+
+
+void LiveObjectList::IterateElementsPrivate(ObjectVisitor* v) {
+ LiveObjectList* lol = last();
+ while (lol != NULL) {
+ Element* elements = lol->elements_;
+ int count = lol->obj_count_;
+ for (int i = 0; i < count; i++) {
+ HeapObject** p = &elements[i].obj_;
+ v->VisitPointer(reinterpret_cast<Object **>(p));
+ }
+ lol = lol->prev_;
+ }
+}
+
+
+// Purpose: Called by GCEpilogue to purge duplicates. Not to be called by
+// anyone else.
+void LiveObjectList::PurgeDuplicates() {
+ bool is_sorted = false;
+ LiveObjectList* lol = last();
+ if (!lol) {
+ return; // Nothing to purge.
+ }
+
+ int total_count = lol->TotalObjCount();
+ if (!total_count) {
+ return; // Nothing to purge.
+ }
+
+ Element* elements = NewArray<Element>(total_count);
+ int count = 0;
+
+ // Copy all the object elements into a consecutive array.
+ while (lol) {
+ memcpy(&elements[count], lol->elements_, lol->obj_count_ * sizeof(Element));
+ count += lol->obj_count_;
+ lol = lol->prev_;
+ }
+ qsort(elements, total_count, sizeof(Element),
+ reinterpret_cast<RawComparer>(CompareElement));
+
+ ASSERT(count == total_count);
+
+ // Iterate over all objects in the consolidated list and check for dups.
+ total_count--;
+ for (int i = 0; i < total_count; ) {
+ Element* curr = &elements[i];
+ HeapObject* curr_obj = curr->obj_;
+ int j = i+1;
+ bool done = false;
+
+ while (!done && (j < total_count)) {
+ // Process if the element's object is still live after the current GC.
+ // Non-live objects will be converted to SMIs i.e. not HeapObjects.
+ if (curr_obj->IsHeapObject()) {
+ Element* next = &elements[j];
+ HeapObject* next_obj = next->obj_;
+ if (next_obj->IsHeapObject()) {
+ if (curr_obj != next_obj) {
+ done = true;
+ continue; // Live object but no match. Move on.
+ }
+
+ // NOTE: we've just GCed the LOLs. Hence, they are no longer sorted.
+ // Since we detected at least one need to search for entries, we'll
+ // sort it to enable the use of NullifyMostRecent() below. We only
+ // need to sort it once (except for one exception ... see below).
+ if (!is_sorted) {
+ SortAll();
+ is_sorted = true;
+ }
+
+ // We have a match. Need to nullify the most recent ref to this
+ // object. We'll keep the oldest ref:
+ // Note: we will nullify the element record in the LOL
+ // database, not in the local sorted copy of the elements.
+ NullifyMostRecent(curr_obj);
+ }
+ }
+ // Either the object was already marked for purging, or we just marked
+ // it. Either way, if there's more than one dup, then we need to check
+ // the next element for another possible dup against the current as well
+ // before we move on. So, here we go.
+ j++;
+ }
+
+ // We can move on to checking the match on the next element.
+ i = j;
+ }
+
+ DeleteArray<Element>(elements);
+}
+
+
+// Purpose: Purges dead objects and resorts the LOLs.
+void LiveObjectList::GCEpiloguePrivate() {
+ // Note: During the GC, ConsStrings may be collected and pointers may be
+ // forwarded to its constituent string. As a result, we may find dupes of
+ // objects references in the LOL list.
+ // Another common way we get dups is that free chunks that have been swept
+ // in the oldGen heap may be kept as ByteArray objects in a free list.
+ //
+ // When we promote live objects from the youngGen, the object may be moved
+ // to the start of these free chunks. Since there is no free or move event
+ // for the free chunks, their addresses will show up 2 times: once for their
+ // original free ByteArray selves, and once for the newly promoted youngGen
+ // object. Hence, we can get a duplicate address in the LOL again.
+ //
+ // We need to eliminate these dups because the LOL implementation expects to
+ // only have at most one unique LOL reference to any object at any time.
+ PurgeDuplicates();
+
+ // After the GC, sweep away all free'd Elements and compact.
+ LiveObjectList *prev = NULL;
+ LiveObjectList *next = NULL;
+
+ // Iterating from the youngest lol to the oldest lol.
+ for (LiveObjectList *lol = last(); lol; lol = prev) {
+ Element* elements = lol->elements_;
+ prev = lol->prev(); // Save the prev.
+
+ // Remove any references to collected objects.
+ int i = 0;
+ while (i < lol->obj_count_) {
+ Element& element = elements[i];
+ if (!element.obj_->IsHeapObject()) {
+ // If the HeapObject address was converted into a SMI, then this
+ // is a dead object. Copy the last element over this one.
+ element = elements[lol->obj_count_ - 1];
+ lol->obj_count_--;
+ // We've just moved the last element into this index. We'll revisit
+ // this index again. Hence, no need to increment the iterator.
+ } else {
+ i++; // Look at the next element next.
+ }
+ }
+
+ int new_count = lol->obj_count_;
+
+ // Check if there are any more elements to keep after purging the dead ones.
+ if (new_count == 0) {
+ DeleteArray<Element>(elements);
+ lol->elements_ = NULL;
+ lol->capacity_ = 0;
+ ASSERT(lol->obj_count_ == 0);
+
+ // If the list is also invisible, the clean up the list as well.
+ if (lol->id_ == 0) {
+ // Point the next lol's prev to this lol's prev.
+ if (next) {
+ next->prev_ = lol->prev_;
+ } else {
+ last_ = lol->prev_;
+ }
+
+ // Delete this now empty and invisible lol.
+ delete lol;
+
+ // Don't point the next to this lol since it is now deleted.
+ // Leave the next pointer pointing to the current lol.
+ continue;
+ }
+
+ } else {
+ // If the obj_count_ is less than the capacity and the difference is
+ // greater than a specified threshold, then we should shrink the list.
+ int diff = lol->capacity_ - new_count;
+ const int kMaxUnusedSpace = 64;
+ if (diff > kMaxUnusedSpace) { // Threshold for shrinking.
+ // Shrink the list.
+ Element *new_elements = NewArray<Element>(new_count);
+ memcpy(new_elements, elements, new_count * sizeof(Element));
+
+ DeleteArray<Element>(elements);
+ lol->elements_ = new_elements;
+ lol->capacity_ = new_count;
+ }
+ ASSERT(lol->obj_count_ == new_count);
+
+ lol->Sort(); // We've moved objects. Re-sort in case.
+ }
+
+ // Save the next (for the previous link) in case we need it later.
+ next = lol;
+ }
+
+#ifdef VERIFY_LOL
+ if (FLAG_verify_lol) {
+ Verify();
+ }
+#endif
+}
+
+
+#ifdef VERIFY_LOL
+void LiveObjectList::Verify(bool match_heap_exactly) {
+ OS::Print("Verifying the LiveObjectList database:\n");
+
+ LiveObjectList* lol = last();
+ if (lol == NULL) {
+ OS::Print(" No lol database to verify\n");
+ return;
+ }
+
+ OS::Print(" Preparing the lol database ...\n");
+ int total_count = lol->TotalObjCount();
+
+ Element* elements = NewArray<Element>(total_count);
+ int count = 0;
+
+ // Copy all the object elements into a consecutive array.
+ OS::Print(" Copying the lol database ...\n");
+ while (lol != NULL) {
+ memcpy(&elements[count], lol->elements_, lol->obj_count_ * sizeof(Element));
+ count += lol->obj_count_;
+ lol = lol->prev_;
+ }
+ qsort(elements, total_count, sizeof(Element),
+ reinterpret_cast<RawComparer>(CompareElement));
+
+ ASSERT(count == total_count);
+
+ // Iterate over all objects in the heap and check for:
+ // 1. object in LOL but not in heap i.e. error.
+ // 2. object in heap but not in LOL (possibly not an error). Usually
+ // just means that we don't have the a capture of the latest heap.
+ // That is unless we did this verify immediately after a capture,
+ // and specified match_heap_exactly = true.
+
+ int number_of_heap_objects = 0;
+ int number_of_matches = 0;
+ int number_not_in_heap = total_count;
+ int number_not_in_lol = 0;
+
+ OS::Print(" Start verify ...\n");
+ OS::Print(" Verifying ...");
+ Flush();
+ HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+ HeapObject* heap_obj = NULL;
+ while ((heap_obj = iterator.next()) != NULL) {
+ number_of_heap_objects++;
+
+ // Check if the heap_obj is in the lol.
+ Element key;
+ key.obj_ = heap_obj;
+
+ Element* result = reinterpret_cast<Element*>(
+ bsearch(&key, elements, total_count, sizeof(Element),
+ reinterpret_cast<RawComparer>(CompareElement)));
+
+ if (result != NULL) {
+ number_of_matches++;
+ number_not_in_heap--;
+ // Mark it as found by changing it into a SMI (mask off low bit).
+ // Note: we cannot use HeapObject::cast() here because it asserts that
+ // the HeapObject bit is set on the address, but we're unsetting it on
+ // purpose here for our marking.
+ result->obj_ = reinterpret_cast<HeapObject*>(heap_obj->address());
+
+ } else {
+ number_not_in_lol++;
+ if (match_heap_exactly) {
+ OS::Print("heap object %p NOT in lol database\n", heap_obj);
+ }
+ }
+ // Show some sign of life.
+ if (number_of_heap_objects % 1000 == 0) {
+ OS::Print(".");
+ fflush(stdout);
+ }
+ }
+ OS::Print("\n");
+
+ // Reporting lol objects not found in the heap.
+ if (number_not_in_heap) {
+ int found = 0;
+ for (int i = 0; (i < total_count) && (found < number_not_in_heap); i++) {
+ Element& element = elements[i];
+ if (element.obj_->IsHeapObject()) {
+ OS::Print("lol database object [%d of %d] %p NOT in heap\n",
+ i, total_count, element.obj_);
+ found++;
+ }
+ }
+ }
+
+ DeleteArray<Element>(elements);
+
+ OS::Print("number of objects in lol database %d\n", total_count);
+ OS::Print("number of heap objects .......... %d\n", number_of_heap_objects);
+ OS::Print("number of matches ............... %d\n", number_of_matches);
+ OS::Print("number NOT in heap .............. %d\n", number_not_in_heap);
+ OS::Print("number NOT in lol database ...... %d\n", number_not_in_lol);
+
+ if (number_of_matches != total_count) {
+ OS::Print(" *** ERROR: "
+ "NOT all lol database objects match heap objects.\n");
+ }
+ if (number_not_in_heap != 0) {
+ OS::Print(" *** ERROR: %d lol database objects not found in heap.\n",
+ number_not_in_heap);
+ }
+ if (match_heap_exactly) {
+ if (!(number_not_in_lol == 0)) {
+ OS::Print(" *** ERROR: %d heap objects NOT found in lol database.\n",
+ number_not_in_lol);
+ }
+ }
+
+ ASSERT(number_of_matches == total_count);
+ ASSERT(number_not_in_heap == 0);
+ ASSERT(number_not_in_lol == (number_of_heap_objects - total_count));
+ if (match_heap_exactly) {
+ ASSERT(total_count == number_of_heap_objects);
+ ASSERT(number_not_in_lol == 0);
+ }
+
+ OS::Print(" Verify the lol database is sorted ...\n");
+ lol = last();
+ while (lol != NULL) {
+ Element* elements = lol->elements_;
+ for (int i = 0; i < lol->obj_count_ - 1; i++) {
+ if (elements[i].obj_ >= elements[i+1].obj_) {
+ OS::Print(" *** ERROR: lol %p obj[%d] %p > obj[%d] %p\n",
+ lol, i, elements[i].obj_, i+1, elements[i+1].obj_);
+ }
+ }
+ lol = lol->prev_;
+ }
+
+ OS::Print(" DONE verifying.\n\n\n");
+}
+
+
+void LiveObjectList::VerifyNotInFromSpace() {
+ OS::Print("VerifyNotInFromSpace() ...\n");
+ LolIterator it(NULL, last());
+ int i = 0;
+ for (it.Init(); !it.Done(); it.Next()) {
+ HeapObject* heap_obj = it.Obj();
+ if (Heap::InFromSpace(heap_obj)) {
+ OS::Print(" ERROR: VerifyNotInFromSpace: [%d] obj %p in From space %p\n",
+ i++, heap_obj, Heap::new_space()->FromSpaceLow());
+ }
+ }
+}
+#endif // VERIFY_LOL
+
} } // namespace v8::internal
diff --git a/src/liveobjectlist.h b/src/liveobjectlist.h
index 11f5c45..423f8f0 100644
--- a/src/liveobjectlist.h
+++ b/src/liveobjectlist.h
@@ -40,54 +40,225 @@
#ifdef LIVE_OBJECT_LIST
+#ifdef DEBUG
+// The following symbol when defined enables thorough verification of lol data.
+// FLAG_verify_lol will also need to set to true to enable the verification.
+#define VERIFY_LOL
+#endif
-// Temporary stubbed out LiveObjectList implementation.
+
+typedef int LiveObjectType;
+class LolFilter;
+class LiveObjectSummary;
+class DumpWriter;
+class SummaryWriter;
+
+
+// The LiveObjectList is both a mechanism for tracking a live capture of
+// objects in the JS heap, as well as is the data structure which represents
+// each of those captures. Unlike a snapshot, the lol is live. For example,
+// if an object in a captured lol dies and is collected by the GC, the lol
+// will reflect that the object is no longer available. The term
+// LiveObjectList (and lol) is used to describe both the mechanism and the
+// data structure depending on context of use.
+//
+// In captured lols, objects are tracked using their address and an object id.
+// The object id is unique. Once assigned to an object, the object id can never
+// be assigned to another object. That is unless all captured lols are deleted
+// which allows the user to start over with a fresh set of lols and object ids.
+// The uniqueness of the object ids allows the user to track specific objects
+// and inspect its longevity while debugging JS code in execution.
+//
+// The lol comes with utility functions to capture, dump, summarize, and diff
+// captured lols amongst other functionality. These functionality are
+// accessible via the v8 debugger interface.
class LiveObjectList {
public:
- inline static void GCEpilogue() {}
- inline static void GCPrologue() {}
- inline static void IterateElements(ObjectVisitor* v) {}
- inline static void ProcessNonLive(HeapObject *obj) {}
- inline static void UpdateReferencesForScavengeGC() {}
+ inline static void GCEpilogue();
+ inline static void GCPrologue();
+ inline static void IterateElements(ObjectVisitor* v);
+ inline static void ProcessNonLive(HeapObject *obj);
+ inline static void UpdateReferencesForScavengeGC();
- static MaybeObject* Capture() { return Heap::undefined_value(); }
- static bool Delete(int id) { return false; }
+ // Note: LOLs can be listed by calling Dump(0, <lol id>), and 2 LOLs can be
+ // compared/diff'ed using Dump(<lol id1>, <lol id2>, ...). This will yield
+ // a verbose dump of all the objects in the resultant lists.
+ // Similarly, a summarized result of a LOL listing or a diff can be
+ // attained using the Summarize(0, <lol id>) and Summarize(<lol id1,
+ // <lol id2>, ...) respectively.
+
+ static MaybeObject* Capture();
+ static bool Delete(int id);
static MaybeObject* Dump(int id1,
int id2,
int start_idx,
int dump_limit,
- Handle<JSObject> filter_obj) {
- return Heap::undefined_value();
- }
- static MaybeObject* Info(int start_idx, int dump_limit) {
- return Heap::undefined_value();
- }
- static MaybeObject* Summarize(int id1,
- int id2,
- Handle<JSObject> filter_obj) {
- return Heap::undefined_value();
- }
+ Handle<JSObject> filter_obj);
+ static MaybeObject* Info(int start_idx, int dump_limit);
+ static MaybeObject* Summarize(int id1, int id2, Handle<JSObject> filter_obj);
- static void Reset() {}
- static Object* GetObj(int obj_id) { return Heap::undefined_value(); }
- static Object* GetObjId(Handle<String> address) {
- return Heap::undefined_value();
- }
+ static void Reset();
+ static Object* GetObj(int obj_id);
+ static int GetObjId(Object* obj);
+ static Object* GetObjId(Handle<String> address);
static MaybeObject* GetObjRetainers(int obj_id,
Handle<JSObject> instance_filter,
bool verbose,
int start,
int count,
- Handle<JSObject> filter_obj) {
- return Heap::undefined_value();
- }
+ Handle<JSObject> filter_obj);
static Object* GetPath(int obj_id1,
int obj_id2,
- Handle<JSObject> instance_filter) {
- return Heap::undefined_value();
+ Handle<JSObject> instance_filter);
+ static Object* PrintObj(int obj_id);
+
+ private:
+
+ struct Element {
+ int id_;
+ HeapObject* obj_;
+ };
+
+ explicit LiveObjectList(LiveObjectList* prev, int capacity);
+ ~LiveObjectList();
+
+ static void GCEpiloguePrivate();
+ static void IterateElementsPrivate(ObjectVisitor* v);
+
+ static void DoProcessNonLive(HeapObject *obj);
+
+ static int CompareElement(const Element* a, const Element* b);
+
+ static Object* GetPathPrivate(HeapObject* obj1, HeapObject* obj2);
+
+ static int GetRetainers(Handle<HeapObject> target,
+ Handle<JSObject> instance_filter,
+ Handle<FixedArray> retainers_arr,
+ int start,
+ int dump_limit,
+ int* total_count,
+ LolFilter* filter,
+ LiveObjectSummary *summary,
+ JSFunction* arguments_function,
+ Handle<Object> error);
+
+ static MaybeObject* DumpPrivate(DumpWriter* writer,
+ int start,
+ int dump_limit,
+ LolFilter* filter);
+ static MaybeObject* SummarizePrivate(SummaryWriter* writer,
+ LolFilter* filter,
+ bool is_tracking_roots);
+
+ static bool NeedLOLProcessing() { return (last() != NULL); }
+ static void NullifyNonLivePointer(HeapObject **p) {
+ // Mask out the low bit that marks this as a heap object. We'll use this
+ // cleared bit as an indicator that this pointer needs to be collected.
+ //
+ // Meanwhile, we still preserve its approximate value so that we don't
+ // have to resort the elements list all the time.
+ //
+ // Note: Doing so also makes this HeapObject* look like an SMI. Hence,
+ // GC pointer updater will ignore it when it gets scanned.
+ *p = reinterpret_cast<HeapObject*>((*p)->address());
}
- static Object* PrintObj(int obj_id) { return Heap::undefined_value(); }
+
+ LiveObjectList* prev() { return prev_; }
+ LiveObjectList* next() { return next_; }
+ int id() { return id_; }
+
+ static int list_count() { return list_count_; }
+ static LiveObjectList* last() { return last_; }
+
+ inline static LiveObjectList* FindLolForId(int id, LiveObjectList* start_lol);
+ int TotalObjCount() { return GetTotalObjCountAndSize(NULL); }
+ int GetTotalObjCountAndSize(int* size_p);
+
+ bool Add(HeapObject* obj);
+ Element* Find(HeapObject* obj);
+ static void NullifyMostRecent(HeapObject* obj);
+ void Sort();
+ static void SortAll();
+
+ static void PurgeDuplicates(); // Only to be called by GCEpilogue.
+
+#ifdef VERIFY_LOL
+ static void Verify(bool match_heap_exactly = false);
+ static void VerifyNotInFromSpace();
+#endif
+
+ // Iterates the elements in every lol and returns the one that matches the
+ // specified key. If no matching element is found, then it returns NULL.
+ template <typename T>
+ inline static LiveObjectList::Element*
+ FindElementFor(T (*GetValue)(LiveObjectList::Element*), T key);
+
+ inline static int GetElementId(Element* element);
+ inline static HeapObject* GetElementObj(Element* element);
+
+ // Instance fields.
+ LiveObjectList* prev_;
+ LiveObjectList* next_;
+ int id_;
+ int capacity_;
+ int obj_count_;
+ Element *elements_;
+
+ // Statics for managing all the lists.
+ static uint32_t next_element_id_;
+ static int list_count_;
+ static int last_id_;
+ static LiveObjectList* first_;
+ static LiveObjectList* last_;
+
+ friend class LolIterator;
+ friend class LolForwardIterator;
+ friend class LolDumpWriter;
+ friend class RetainersDumpWriter;
+ friend class RetainersSummaryWriter;
+ friend class UpdateLiveObjectListVisitor;
+};
+
+
+// Helper class for updating the LiveObjectList HeapObject pointers.
+class UpdateLiveObjectListVisitor: public ObjectVisitor {
+ public:
+
+ void VisitPointer(Object** p) { UpdatePointer(p); }
+
+ void VisitPointers(Object** start, Object** end) {
+ // Copy all HeapObject pointers in [start, end).
+ for (Object** p = start; p < end; p++) UpdatePointer(p);
+ }
+
+ private:
+ // Based on Heap::ScavengeObject() but only does forwarding of pointers
+ // to live new space objects, and not actually keep them alive.
+ void UpdatePointer(Object** p) {
+ Object* object = *p;
+ if (!Heap::InNewSpace(object)) return;
+
+ HeapObject* heap_obj = HeapObject::cast(object);
+ ASSERT(Heap::InFromSpace(heap_obj));
+
+ // We use the first word (where the map pointer usually is) of a heap
+ // object to record the forwarding pointer. A forwarding pointer can
+ // point to an old space, the code space, or the to space of the new
+ // generation.
+ MapWord first_word = heap_obj->map_word();
+
+ // If the first word is a forwarding address, the object has already been
+ // copied.
+ if (first_word.IsForwardingAddress()) {
+ *p = first_word.ToForwardingAddress();
+ return;
+
+ // Else, it's a dead object.
+ } else {
+ LiveObjectList::NullifyNonLivePointer(reinterpret_cast<HeapObject**>(p));
+ }
+ }
};
@@ -96,11 +267,50 @@
class LiveObjectList {
public:
- static void GCEpilogue() {}
- static void GCPrologue() {}
- static void IterateElements(ObjectVisitor* v) {}
- static void ProcessNonLive(HeapObject *obj) {}
- static void UpdateReferencesForScavengeGC() {}
+ inline static void GCEpilogue() {}
+ inline static void GCPrologue() {}
+ inline static void IterateElements(ObjectVisitor* v) {}
+ inline static void ProcessNonLive(HeapObject* obj) {}
+ inline static void UpdateReferencesForScavengeGC() {}
+
+ inline static MaybeObject* Capture() { return Heap::undefined_value(); }
+ inline static bool Delete(int id) { return false; }
+ inline static MaybeObject* Dump(int id1,
+ int id2,
+ int start_idx,
+ int dump_limit,
+ Handle<JSObject> filter_obj) {
+ return Heap::undefined_value();
+ }
+ inline static MaybeObject* Info(int start_idx, int dump_limit) {
+ return Heap::undefined_value();
+ }
+ inline static MaybeObject* Summarize(int id1,
+ int id2,
+ Handle<JSObject> filter_obj) {
+ return Heap::undefined_value();
+ }
+
+ inline static void Reset() {}
+ inline static Object* GetObj(int obj_id) { return Heap::undefined_value(); }
+ inline static Object* GetObjId(Handle<String> address) {
+ return Heap::undefined_value();
+ }
+ inline static MaybeObject* GetObjRetainers(int obj_id,
+ Handle<JSObject> instance_filter,
+ bool verbose,
+ int start,
+ int count,
+ Handle<JSObject> filter_obj) {
+ return Heap::undefined_value();
+ }
+
+ inline static Object* GetPath(int obj_id1,
+ int obj_id2,
+ Handle<JSObject> instance_filter) {
+ return Heap::undefined_value();
+ }
+ inline static Object* PrintObj(int obj_id) { return Heap::undefined_value(); }
};
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index a3b769a..a4c782c 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -1353,6 +1353,9 @@
// Flush code from collected candidates.
FlushCode::ProcessCandidates();
+
+ // Clean up dead objects from the runtime profiler.
+ RuntimeProfiler::RemoveDeadSamples();
}
@@ -1937,6 +1940,9 @@
// All pointers were updated. Update auxiliary allocation info.
Heap::IncrementYoungSurvivorsCounter(survivors_size);
space->set_age_mark(space->top());
+
+ // Update JSFunction pointers from the runtime profiler.
+ RuntimeProfiler::UpdateSamplesAfterScavenge();
}
@@ -2535,6 +2541,7 @@
state_ = UPDATE_POINTERS;
#endif
UpdatingVisitor updating_visitor;
+ RuntimeProfiler::UpdateSamplesAfterCompact(&updating_visitor);
Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
GlobalHandles::IterateWeakRoots(&updating_visitor);
diff --git a/src/messages.js b/src/messages.js
index b7e57aa..2c94912 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -226,6 +226,10 @@
strict_reserved_word: ["Use of future reserved word in strict mode"],
strict_delete: ["Delete of an unqualified identifier in strict mode."],
strict_delete_property: ["Cannot delete property '", "%0", "' of ", "%1"],
+ strict_const: ["Use of const in strict mode."],
+ strict_function: ["In strict mode code, functions can only be declared at top level or immediately within another function." ],
+ strict_read_only_property: ["Cannot assign to read only property '", "%0", "' of ", "%1"],
+ strict_cannot_assign: ["Cannot assign to read only '", "%0", "' in strict mode"],
};
}
var message_type = %MessageGetType(message);
@@ -1059,8 +1063,8 @@
}
}
-%FunctionSetName(errorToString, 'toString');
-%SetProperty($Error.prototype, 'toString', errorToString, DONT_ENUM);
+
+InstallFunctions($Error.prototype, DONT_ENUM, ['toString', errorToString]);
// Boilerplate for exceptions for stack overflows. Used from
// Top::StackOverflow().
diff --git a/src/objects-inl.h b/src/objects-inl.h
index f955d33..dedb199 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -2619,7 +2619,8 @@
ASSERT(extra_ic_state == kNoExtraICState ||
(kind == CALL_IC && (ic_state == MONOMORPHIC ||
ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
- (kind == STORE_IC));
+ (kind == STORE_IC) ||
+ (kind == KEYED_STORE_IC));
// Compute the bit mask.
int bits = kind << kFlagsKindShift;
if (in_loop) bits |= kFlagsICInLoopMask;
@@ -3741,7 +3742,8 @@
ASSERT(!IsJSGlobalProxy());
return SetPropertyPostInterceptor(Heap::hidden_symbol(),
hidden_obj,
- DONT_ENUM);
+ DONT_ENUM,
+ kNonStrictMode);
}
diff --git a/src/objects.cc b/src/objects.cc
index 0b1d72a..0b7f60a 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -1444,14 +1444,15 @@
MaybeObject* JSObject::SetPropertyPostInterceptor(
String* name,
Object* value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
// Check local property, ignore interceptor.
LookupResult result;
LocalLookupRealNamedProperty(name, &result);
if (result.IsFound()) {
// An existing property, a map transition or a null descriptor was
// found. Use set property to handle all these cases.
- return SetProperty(&result, name, value, attributes);
+ return SetProperty(&result, name, value, attributes, strict);
}
// Add a new real property.
return AddProperty(name, value, attributes);
@@ -1576,7 +1577,8 @@
MaybeObject* JSObject::SetPropertyWithInterceptor(
String* name,
Object* value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
HandleScope scope;
Handle<JSObject> this_handle(this);
Handle<String> name_handle(name);
@@ -1605,7 +1607,8 @@
MaybeObject* raw_result =
this_handle->SetPropertyPostInterceptor(*name_handle,
*value_handle,
- attributes);
+ attributes,
+ strict);
RETURN_IF_SCHEDULED_EXCEPTION();
return raw_result;
}
@@ -1613,10 +1616,11 @@
MaybeObject* JSObject::SetProperty(String* name,
Object* value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
LookupResult result;
LocalLookup(name, &result);
- return SetProperty(&result, name, value, attributes);
+ return SetProperty(&result, name, value, attributes, strict);
}
@@ -1896,7 +1900,8 @@
MaybeObject* JSObject::SetProperty(LookupResult* result,
String* name,
Object* value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StrictModeFlag strict) {
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
AssertNoContextChange ncc;
@@ -1923,7 +1928,8 @@
Object* proto = GetPrototype();
if (proto->IsNull()) return value;
ASSERT(proto->IsJSGlobalObject());
- return JSObject::cast(proto)->SetProperty(result, name, value, attributes);
+ return JSObject::cast(proto)->SetProperty(
+ result, name, value, attributes, strict);
}
if (!result->IsProperty() && !IsJSContextExtensionObject()) {
@@ -1942,7 +1948,19 @@
// Neither properties nor transitions found.
return AddProperty(name, value, attributes);
}
- if (result->IsReadOnly() && result->IsProperty()) return value;
+ if (result->IsReadOnly() && result->IsProperty()) {
+ if (strict == kStrictMode) {
+ HandleScope scope;
+ Handle<String> key(name);
+ Handle<Object> holder(this);
+ Handle<Object> args[2] = { key, holder };
+ return Top::Throw(*Factory::NewTypeError("strict_read_only_property",
+ HandleVector(args, 2)));
+
+ } else {
+ return value;
+ }
+ }
// This is a real property that is not read-only, or it is a
// transition or null descriptor and there are no setters in the prototypes.
switch (result->type()) {
@@ -1970,7 +1988,7 @@
value,
result->holder());
case INTERCEPTOR:
- return SetPropertyWithInterceptor(name, value, attributes);
+ return SetPropertyWithInterceptor(name, value, attributes, strict);
case CONSTANT_TRANSITION: {
// If the same constant function is being added we can simply
// transition to the target map.
@@ -6287,7 +6305,8 @@
}
break;
case STORE_IC:
- if (extra == StoreIC::kStoreICStrict) {
+ case KEYED_STORE_IC:
+ if (extra == kStrictMode) {
name = "STRICT";
}
break;
diff --git a/src/objects.h b/src/objects.h
index fbfc5fd..de15a73 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -1361,11 +1361,13 @@
MUST_USE_RESULT MaybeObject* SetProperty(String* key,
Object* value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
MUST_USE_RESULT MaybeObject* SetProperty(LookupResult* result,
String* key,
Object* value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
MUST_USE_RESULT MaybeObject* SetPropertyWithFailedAccessCheck(
LookupResult* result,
String* name,
@@ -1380,11 +1382,13 @@
MUST_USE_RESULT MaybeObject* SetPropertyWithInterceptor(
String* name,
Object* value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
MUST_USE_RESULT MaybeObject* SetPropertyPostInterceptor(
String* name,
Object* value,
- PropertyAttributes attributes);
+ PropertyAttributes attributes,
+ StrictModeFlag strict);
MUST_USE_RESULT MaybeObject* SetLocalPropertyIgnoreAttributes(
String* key,
Object* value,
diff --git a/src/parser.cc b/src/parser.cc
index 249c9ce..8560310 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -1106,7 +1106,20 @@
}
Scanner::Location token_loc = scanner().peek_location();
- Statement* stat = ParseStatement(NULL, CHECK_OK);
+
+ Statement* stat;
+ if (peek() == Token::FUNCTION) {
+ // FunctionDeclaration is only allowed in the context of SourceElements
+ // (Ecma 262 5th Edition, clause 14):
+ // SourceElement:
+ // Statement
+ // FunctionDeclaration
+ // Common language extension is to allow function declaration in place
+ // of any statement. This language extension is disabled in strict mode.
+ stat = ParseFunctionDeclaration(CHECK_OK);
+ } else {
+ stat = ParseStatement(NULL, CHECK_OK);
+ }
if (stat == NULL || stat->IsEmpty()) {
directive_prologue = false; // End of directive prologue.
@@ -1263,8 +1276,17 @@
return result;
}
- case Token::FUNCTION:
+ case Token::FUNCTION: {
+ // In strict mode, FunctionDeclaration is only allowed in the context
+ // of SourceElements.
+ if (temp_scope_->StrictMode()) {
+ ReportMessageAt(scanner().peek_location(), "strict_function",
+ Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
return ParseFunctionDeclaration(ok);
+ }
case Token::NATIVE:
return ParseNativeDeclaration(ok);
@@ -1515,6 +1537,11 @@
Consume(Token::VAR);
} else if (peek() == Token::CONST) {
Consume(Token::CONST);
+ if (temp_scope_->StrictMode()) {
+ ReportMessage("strict_const", Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
mode = Variable::CONST;
is_const = true;
} else {
@@ -1634,34 +1661,49 @@
if (top_scope_->is_global_scope()) {
// Compute the arguments for the runtime call.
- ZoneList<Expression*>* arguments = new ZoneList<Expression*>(2);
- // Be careful not to assign a value to the global variable if
- // we're in a with. The initialization value should not
- // necessarily be stored in the global object in that case,
- // which is why we need to generate a separate assignment node.
+ ZoneList<Expression*>* arguments = new ZoneList<Expression*>(3);
arguments->Add(new Literal(name)); // we have at least 1 parameter
- if (is_const || (value != NULL && !inside_with())) {
+ CallRuntime* initialize;
+
+ if (is_const) {
arguments->Add(value);
value = NULL; // zap the value to avoid the unnecessary assignment
- }
- // Construct the call to Runtime::DeclareGlobal{Variable,Const}Locally
- // and add it to the initialization statement block. Note that
- // this function does different things depending on if we have
- // 1 or 2 parameters.
- CallRuntime* initialize;
- if (is_const) {
+
+ // Construct the call to Runtime_InitializeConstGlobal
+ // and add it to the initialization statement block.
+ // Note that the function does different things depending on
+ // the number of arguments (1 or 2).
initialize =
- new CallRuntime(
- Factory::InitializeConstGlobal_symbol(),
- Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
- arguments);
+ new CallRuntime(
+ Factory::InitializeConstGlobal_symbol(),
+ Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
+ arguments);
} else {
+ // Add strict mode.
+ // We may want to pass singleton to avoid Literal allocations.
+ arguments->Add(NewNumberLiteral(
+ temp_scope_->StrictMode() ? kStrictMode : kNonStrictMode));
+
+ // Be careful not to assign a value to the global variable if
+ // we're in a with. The initialization value should not
+ // necessarily be stored in the global object in that case,
+ // which is why we need to generate a separate assignment node.
+ if (value != NULL && !inside_with()) {
+ arguments->Add(value);
+ value = NULL; // zap the value to avoid the unnecessary assignment
+ }
+
+ // Construct the call to Runtime_InitializeVarGlobal
+ // and add it to the initialization statement block.
+ // Note that the function does different things depending on
+ // the number of arguments (2 or 3).
initialize =
- new CallRuntime(
- Factory::InitializeVarGlobal_symbol(),
- Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
- arguments);
+ new CallRuntime(
+ Factory::InitializeVarGlobal_symbol(),
+ Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
+ arguments);
}
+
block->AddStatement(new ExpressionStatement(initialize));
}
diff --git a/src/profile-generator-inl.h b/src/profile-generator-inl.h
index 4bcfa9b..747e5c7 100644
--- a/src/profile-generator-inl.h
+++ b/src/profile-generator-inl.h
@@ -121,34 +121,6 @@
return id_adaptor.returned_id;
}
-
-template<class Visitor>
-void HeapEntriesMap::UpdateEntries(Visitor* visitor) {
- for (HashMap::Entry* p = entries_.Start();
- p != NULL;
- p = entries_.Next(p)) {
- EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value);
- entry_info->entry = visitor->GetEntry(
- reinterpret_cast<HeapObject*>(p->key),
- entry_info->children_count,
- entry_info->retainers_count);
- entry_info->children_count = 0;
- entry_info->retainers_count = 0;
- }
-}
-
-
-bool HeapSnapshotGenerator::ReportProgress(bool force) {
- const int kProgressReportGranularity = 10000;
- if (control_ != NULL
- && (force || progress_counter_ % kProgressReportGranularity == 0)) {
- return
- control_->ReportProgressValue(progress_counter_, progress_total_) ==
- v8::ActivityControl::kContinue;
- }
- return true;
-}
-
} } // namespace v8::internal
#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index 261b3d6..7612eab 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -1177,12 +1177,6 @@
}
-HeapObject *const HeapSnapshot::kInternalRootObject =
- reinterpret_cast<HeapObject*>(1);
-HeapObject *const HeapSnapshot::kGcRootsObject =
- reinterpret_cast<HeapObject*>(2);
-
-
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace { // Avoid littering the global namespace.
@@ -1253,96 +1247,6 @@
}
-HeapEntry* HeapSnapshot::AddEntry(HeapObject* object,
- int children_count,
- int retainers_count) {
- if (object == kInternalRootObject) {
- ASSERT(root_entry_ == NULL);
- ASSERT(retainers_count == 0);
- return (root_entry_ = AddEntry(HeapEntry::kObject,
- "",
- HeapObjectsMap::kInternalRootObjectId,
- 0,
- children_count,
- retainers_count));
- } else if (object == kGcRootsObject) {
- ASSERT(gc_roots_entry_ == NULL);
- return (gc_roots_entry_ = AddEntry(HeapEntry::kObject,
- "(GC roots)",
- HeapObjectsMap::kGcRootsObjectId,
- 0,
- children_count,
- retainers_count));
- } else if (object->IsJSFunction()) {
- JSFunction* func = JSFunction::cast(object);
- SharedFunctionInfo* shared = func->shared();
- return AddEntry(object,
- HeapEntry::kClosure,
- collection_->GetName(String::cast(shared->name())),
- children_count,
- retainers_count);
- } else if (object->IsJSRegExp()) {
- JSRegExp* re = JSRegExp::cast(object);
- return AddEntry(object,
- HeapEntry::kRegExp,
- collection_->GetName(re->Pattern()),
- children_count,
- retainers_count);
- } else if (object->IsJSObject()) {
- return AddEntry(object,
- HeapEntry::kObject,
- collection_->GetName(GetConstructorNameForHeapProfile(
- JSObject::cast(object))),
- children_count,
- retainers_count);
- } else if (object->IsString()) {
- return AddEntry(object,
- HeapEntry::kString,
- collection_->GetName(String::cast(object)),
- children_count,
- retainers_count);
- } else if (object->IsCode()) {
- return AddEntry(object,
- HeapEntry::kCode,
- "",
- children_count,
- retainers_count);
- } else if (object->IsSharedFunctionInfo()) {
- SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
- return AddEntry(object,
- HeapEntry::kCode,
- collection_->GetName(String::cast(shared->name())),
- children_count,
- retainers_count);
- } else if (object->IsScript()) {
- Script* script = Script::cast(object);
- return AddEntry(object,
- HeapEntry::kCode,
- script->name()->IsString() ?
- collection_->GetName(String::cast(script->name())) : "",
- children_count,
- retainers_count);
- } else if (object->IsFixedArray()) {
- return AddEntry(object,
- HeapEntry::kArray,
- "",
- children_count,
- retainers_count);
- } else if (object->IsHeapNumber()) {
- return AddEntry(object,
- HeapEntry::kHeapNumber,
- "number",
- children_count,
- retainers_count);
- }
- return AddEntry(object,
- HeapEntry::kHidden,
- "system",
- children_count,
- retainers_count);
-}
-
-
static void HeapEntryClearPaint(HeapEntry** entry_ptr) {
(*entry_ptr)->clear_paint();
}
@@ -1352,17 +1256,26 @@
}
-HeapEntry* HeapSnapshot::AddEntry(HeapObject* object,
- HeapEntry::Type type,
- const char* name,
- int children_count,
- int retainers_count) {
- return AddEntry(type,
- name,
- collection_->GetObjectId(object->address()),
- object->Size(),
- children_count,
- retainers_count);
+HeapEntry* HeapSnapshot::AddRootEntry(int children_count) {
+ ASSERT(root_entry_ == NULL);
+ return (root_entry_ = AddEntry(HeapEntry::kObject,
+ "",
+ HeapObjectsMap::kInternalRootObjectId,
+ 0,
+ children_count,
+ 0));
+}
+
+
+HeapEntry* HeapSnapshot::AddGcRootsEntry(int children_count,
+ int retainers_count) {
+ ASSERT(gc_roots_entry_ == NULL);
+ return (gc_roots_entry_ = AddEntry(HeapEntry::kObject,
+ "(GC roots)",
+ HeapObjectsMap::kGcRootsObjectId,
+ 0,
+ children_count,
+ retainers_count));
}
@@ -1615,7 +1528,7 @@
reinterpret_cast<HeapEntry*>(1);
HeapEntriesMap::HeapEntriesMap()
- : entries_(HeapObjectsMatch),
+ : entries_(HeapThingsMatch),
entries_count_(0),
total_children_count_(0),
total_retainers_count_(0) {
@@ -1629,8 +1542,23 @@
}
-HeapEntry* HeapEntriesMap::Map(HeapObject* object) {
- HashMap::Entry* cache_entry = entries_.Lookup(object, Hash(object), false);
+void HeapEntriesMap::AllocateEntries() {
+ for (HashMap::Entry* p = entries_.Start();
+ p != NULL;
+ p = entries_.Next(p)) {
+ EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value);
+ entry_info->entry = entry_info->allocator->AllocateEntry(
+ p->key,
+ entry_info->children_count,
+ entry_info->retainers_count);
+ entry_info->children_count = 0;
+ entry_info->retainers_count = 0;
+ }
+}
+
+
+HeapEntry* HeapEntriesMap::Map(HeapThing thing) {
+ HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
if (cache_entry != NULL) {
EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(cache_entry->value);
return entry_info->entry;
@@ -1640,15 +1568,16 @@
}
-void HeapEntriesMap::Pair(HeapObject* object, HeapEntry* entry) {
- HashMap::Entry* cache_entry = entries_.Lookup(object, Hash(object), true);
+void HeapEntriesMap::Pair(
+ HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry) {
+ HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
ASSERT(cache_entry->value == NULL);
- cache_entry->value = new EntryInfo(entry);
+ cache_entry->value = new EntryInfo(entry, allocator);
++entries_count_;
}
-void HeapEntriesMap::CountReference(HeapObject* from, HeapObject* to,
+void HeapEntriesMap::CountReference(HeapThing from, HeapThing to,
int* prev_children_count,
int* prev_retainers_count) {
HashMap::Entry* from_cache_entry = entries_.Lookup(from, Hash(from), false);
@@ -1671,7 +1600,7 @@
HeapObjectsSet::HeapObjectsSet()
- : entries_(HeapEntriesMap::HeapObjectsMatch) {
+ : entries_(HeapEntriesMap::HeapThingsMatch) {
}
@@ -1700,206 +1629,144 @@
}
-HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
- v8::ActivityControl* control)
+HeapObject *const V8HeapExplorer::kInternalRootObject =
+ reinterpret_cast<HeapObject*>(1);
+HeapObject *const V8HeapExplorer::kGcRootsObject =
+ reinterpret_cast<HeapObject*>(2);
+
+
+V8HeapExplorer::V8HeapExplorer(
+ HeapSnapshot* snapshot,
+ SnapshottingProgressReportingInterface* progress)
: snapshot_(snapshot),
- control_(control),
- collection_(snapshot->collection()),
+ collection_(snapshot_->collection()),
+ progress_(progress),
filler_(NULL) {
}
-class SnapshotCounter : public HeapSnapshotGenerator::SnapshotFillerInterface {
- public:
- explicit SnapshotCounter(HeapEntriesMap* entries)
- : entries_(entries) { }
- HeapEntry* AddEntry(HeapObject* obj) {
- entries_->Pair(obj, HeapEntriesMap::kHeapEntryPlaceholder);
- return HeapEntriesMap::kHeapEntryPlaceholder;
- }
- void SetIndexedReference(HeapGraphEdge::Type,
- HeapObject* parent_obj,
- HeapEntry*,
- int,
- Object* child_obj,
- HeapEntry*) {
- entries_->CountReference(parent_obj, HeapObject::cast(child_obj));
- }
- void SetNamedReference(HeapGraphEdge::Type,
- HeapObject* parent_obj,
- HeapEntry*,
- const char*,
- Object* child_obj,
- HeapEntry*) {
- entries_->CountReference(parent_obj, HeapObject::cast(child_obj));
- }
- void SetRootShortcutReference(Object* child_obj, HeapEntry*) {
- entries_->CountReference(
- HeapSnapshot::kInternalRootObject, HeapObject::cast(child_obj));
- }
- void SetRootGcRootsReference() {
- entries_->CountReference(
- HeapSnapshot::kInternalRootObject, HeapSnapshot::kGcRootsObject);
- }
- void SetStrongRootReference(Object* child_obj, HeapEntry*) {
- entries_->CountReference(
- HeapSnapshot::kGcRootsObject, HeapObject::cast(child_obj));
- }
- private:
- HeapEntriesMap* entries_;
-};
-
-class SnapshotFiller : public HeapSnapshotGenerator::SnapshotFillerInterface {
- public:
- explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
- : snapshot_(snapshot),
- collection_(snapshot->collection()),
- entries_(entries) { }
- HeapEntry* AddEntry(HeapObject* obj) {
- UNREACHABLE();
- return NULL;
- }
- void SetIndexedReference(HeapGraphEdge::Type type,
- HeapObject* parent_obj,
- HeapEntry* parent_entry,
- int index,
- Object* child_obj,
- HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(parent_obj,
- HeapObject::cast(child_obj),
- &child_index,
- &retainer_index);
- parent_entry->SetIndexedReference(
- type, child_index, index, child_entry, retainer_index);
- }
- void SetNamedReference(HeapGraphEdge::Type type,
- HeapObject* parent_obj,
- HeapEntry* parent_entry,
- const char* reference_name,
- Object* child_obj,
- HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(parent_obj, HeapObject::cast(child_obj),
- &child_index, &retainer_index);
- parent_entry->SetNamedReference(type,
- child_index,
- reference_name,
- child_entry,
- retainer_index);
- }
- void SetRootGcRootsReference() {
- int child_index, retainer_index;
- entries_->CountReference(HeapSnapshot::kInternalRootObject,
- HeapSnapshot::kGcRootsObject,
- &child_index,
- &retainer_index);
- snapshot_->root()->SetIndexedReference(HeapGraphEdge::kElement,
- child_index,
- child_index + 1,
- snapshot_->gc_roots(),
- retainer_index);
- }
- void SetRootShortcutReference(Object* child_obj,
- HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(HeapSnapshot::kInternalRootObject,
- HeapObject::cast(child_obj),
- &child_index,
- &retainer_index);
- snapshot_->root()->SetNamedReference(HeapGraphEdge::kShortcut,
- child_index,
- collection_->GetName(child_index + 1),
- child_entry,
- retainer_index);
- }
- void SetStrongRootReference(Object* child_obj,
- HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(HeapSnapshot::kGcRootsObject,
- HeapObject::cast(child_obj),
- &child_index,
- &retainer_index);
- snapshot_->gc_roots()->SetIndexedReference(HeapGraphEdge::kElement,
- child_index,
- child_index + 1,
- child_entry,
- retainer_index);
- }
- private:
- HeapSnapshot* snapshot_;
- HeapSnapshotsCollection* collection_;
- HeapEntriesMap* entries_;
-};
-
-class SnapshotAllocator {
- public:
- explicit SnapshotAllocator(HeapSnapshot* snapshot)
- : snapshot_(snapshot) { }
- HeapEntry* GetEntry(
- HeapObject* obj, int children_count, int retainers_count) {
- HeapEntry* entry =
- snapshot_->AddEntry(obj, children_count, retainers_count);
- ASSERT(entry != NULL);
- return entry;
- }
- private:
- HeapSnapshot* snapshot_;
-};
-
-class RootsReferencesExtractor : public ObjectVisitor {
- public:
- explicit RootsReferencesExtractor(HeapSnapshotGenerator* generator)
- : generator_(generator) {
- }
- void VisitPointers(Object** start, Object** end) {
- for (Object** p = start; p < end; p++) generator_->SetGcRootsReference(*p);
- }
- private:
- HeapSnapshotGenerator* generator_;
-};
-
-
-bool HeapSnapshotGenerator::GenerateSnapshot() {
- AssertNoAllocation no_alloc;
-
- SetProgressTotal(4); // 2 passes + dominators + sizes.
-
- // Pass 1. Iterate heap contents to count entries and references.
- if (!CountEntriesAndReferences()) return false;
-
- // Allocate and fill entries in the snapshot, allocate references.
- snapshot_->AllocateEntries(entries_.entries_count(),
- entries_.total_children_count(),
- entries_.total_retainers_count());
- SnapshotAllocator allocator(snapshot_);
- entries_.UpdateEntries(&allocator);
-
- // Pass 2. Fill references.
- if (!FillReferences()) return false;
-
- if (!SetEntriesDominators()) return false;
- if (!ApproximateRetainedSizes()) return false;
-
- progress_counter_ = progress_total_;
- if (!ReportProgress(true)) return false;
- return true;
+V8HeapExplorer::~V8HeapExplorer() {
}
-HeapEntry* HeapSnapshotGenerator::GetEntry(Object* obj) {
- if (!obj->IsHeapObject()) return NULL;
- HeapObject* object = HeapObject::cast(obj);
- HeapEntry* entry = entries_.Map(object);
- // A new entry.
- if (entry == NULL) entry = filler_->AddEntry(object);
- return entry;
+HeapEntry* V8HeapExplorer::AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count) {
+ return AddEntry(
+ reinterpret_cast<HeapObject*>(ptr), children_count, retainers_count);
+}
+
+
+HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
+ int children_count,
+ int retainers_count) {
+ if (object == kInternalRootObject) {
+ ASSERT(retainers_count == 0);
+ return snapshot_->AddRootEntry(children_count);
+ } else if (object == kGcRootsObject) {
+ return snapshot_->AddGcRootsEntry(children_count, retainers_count);
+ } else if (object->IsJSFunction()) {
+ JSFunction* func = JSFunction::cast(object);
+ SharedFunctionInfo* shared = func->shared();
+ return AddEntry(object,
+ HeapEntry::kClosure,
+ collection_->GetName(String::cast(shared->name())),
+ children_count,
+ retainers_count);
+ } else if (object->IsJSRegExp()) {
+ JSRegExp* re = JSRegExp::cast(object);
+ return AddEntry(object,
+ HeapEntry::kRegExp,
+ collection_->GetName(re->Pattern()),
+ children_count,
+ retainers_count);
+ } else if (object->IsJSObject()) {
+ return AddEntry(object,
+ HeapEntry::kObject,
+ collection_->GetName(GetConstructorNameForHeapProfile(
+ JSObject::cast(object))),
+ children_count,
+ retainers_count);
+ } else if (object->IsString()) {
+ return AddEntry(object,
+ HeapEntry::kString,
+ collection_->GetName(String::cast(object)),
+ children_count,
+ retainers_count);
+ } else if (object->IsCode()) {
+ return AddEntry(object,
+ HeapEntry::kCode,
+ "",
+ children_count,
+ retainers_count);
+ } else if (object->IsSharedFunctionInfo()) {
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+ return AddEntry(object,
+ HeapEntry::kCode,
+ collection_->GetName(String::cast(shared->name())),
+ children_count,
+ retainers_count);
+ } else if (object->IsScript()) {
+ Script* script = Script::cast(object);
+ return AddEntry(object,
+ HeapEntry::kCode,
+ script->name()->IsString() ?
+ collection_->GetName(String::cast(script->name())) : "",
+ children_count,
+ retainers_count);
+ } else if (object->IsFixedArray()) {
+ return AddEntry(object,
+ HeapEntry::kArray,
+ "",
+ children_count,
+ retainers_count);
+ } else if (object->IsHeapNumber()) {
+ return AddEntry(object,
+ HeapEntry::kHeapNumber,
+ "number",
+ children_count,
+ retainers_count);
+ }
+ return AddEntry(object,
+ HeapEntry::kHidden,
+ "system",
+ children_count,
+ retainers_count);
+}
+
+
+HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
+ HeapEntry::Type type,
+ const char* name,
+ int children_count,
+ int retainers_count) {
+ return snapshot_->AddEntry(type,
+ name,
+ collection_->GetObjectId(object->address()),
+ object->Size(),
+ children_count,
+ retainers_count);
+}
+
+
+void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
+ filler->AddEntry(kInternalRootObject);
+ filler->AddEntry(kGcRootsObject);
+}
+
+
+int V8HeapExplorer::EstimateObjectsCount() {
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
+ int objects_count = 0;
+ for (HeapObject* obj = iterator.next();
+ obj != NULL;
+ obj = iterator.next(), ++objects_count) {}
+ return objects_count;
}
class IndexedReferencesExtractor : public ObjectVisitor {
public:
- IndexedReferencesExtractor(HeapSnapshotGenerator* generator,
+ IndexedReferencesExtractor(V8HeapExplorer* generator,
HeapObject* parent_obj,
HeapEntry* parent_entry,
HeapObjectsSet* known_references = NULL)
@@ -1917,7 +1784,7 @@
}
}
private:
- HeapSnapshotGenerator* generator_;
+ V8HeapExplorer* generator_;
HeapObject* parent_obj_;
HeapEntry* parent_;
HeapObjectsSet* known_references_;
@@ -1925,7 +1792,7 @@
};
-void HeapSnapshotGenerator::ExtractReferences(HeapObject* obj) {
+void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
HeapEntry* entry = GetEntry(obj);
if (entry == NULL) return; // No interest in this object.
@@ -1969,8 +1836,8 @@
}
-void HeapSnapshotGenerator::ExtractClosureReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
+ HeapEntry* entry) {
if (js_obj->IsJSFunction()) {
HandleScope hs;
JSFunction* func = JSFunction::cast(js_obj);
@@ -1992,8 +1859,8 @@
}
-void HeapSnapshotGenerator::ExtractPropertyReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
+ HeapEntry* entry) {
if (js_obj->HasFastProperties()) {
DescriptorArray* descs = js_obj->map()->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
@@ -2034,8 +1901,8 @@
}
-void HeapSnapshotGenerator::ExtractElementReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
+ HeapEntry* entry) {
if (js_obj->HasFastElements()) {
FixedArray* elements = FixedArray::cast(js_obj->elements());
int length = js_obj->IsJSArray() ?
@@ -2061,8 +1928,8 @@
}
-void HeapSnapshotGenerator::ExtractInternalReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj,
+ HeapEntry* entry) {
int length = js_obj->GetInternalFieldCount();
for (int i = 0; i < length; ++i) {
Object* o = js_obj->GetInternalField(i);
@@ -2071,10 +1938,55 @@
}
-void HeapSnapshotGenerator::SetClosureReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
- String* reference_name,
- Object* child_obj) {
+HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
+ if (!obj->IsHeapObject()) return NULL;
+ return filler_->FindOrAddEntry(obj);
+}
+
+
+class RootsReferencesExtractor : public ObjectVisitor {
+ public:
+ explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
+ : explorer_(explorer) {
+ }
+ void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) explorer_->SetGcRootsReference(*p);
+ }
+ private:
+ V8HeapExplorer* explorer_;
+};
+
+
+bool V8HeapExplorer::IterateAndExtractReferences(
+ SnapshotFillerInterface* filler) {
+ filler_ = filler;
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
+ bool interrupted = false;
+ // Heap iteration with filtering must be finished in any case.
+ for (HeapObject* obj = iterator.next();
+ obj != NULL;
+ obj = iterator.next(), progress_->ProgressStep()) {
+ if (!interrupted) {
+ ExtractReferences(obj);
+ if (!progress_->ProgressReport(false)) interrupted = true;
+ }
+ }
+ if (interrupted) {
+ filler_ = NULL;
+ return false;
+ }
+ SetRootGcRootsReference();
+ RootsReferencesExtractor extractor(this);
+ Heap::IterateRoots(&extractor, VISIT_ALL);
+ filler_ = NULL;
+ return progress_->ProgressReport(false);
+}
+
+
+void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ String* reference_name,
+ Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
@@ -2088,10 +2000,10 @@
}
-void HeapSnapshotGenerator::SetElementReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
- int index,
- Object* child_obj) {
+void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ int index,
+ Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetIndexedReference(HeapGraphEdge::kElement,
@@ -2105,10 +2017,10 @@
}
-void HeapSnapshotGenerator::SetInternalReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
- const char* reference_name,
- Object* child_obj) {
+void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ const char* reference_name,
+ Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetNamedReference(HeapGraphEdge::kInternal,
@@ -2122,10 +2034,10 @@
}
-void HeapSnapshotGenerator::SetInternalReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
- int index,
- Object* child_obj) {
+void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ int index,
+ Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetNamedReference(HeapGraphEdge::kInternal,
@@ -2139,10 +2051,10 @@
}
-void HeapSnapshotGenerator::SetHiddenReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
- int index,
- Object* child_obj) {
+void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ int index,
+ Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetIndexedReference(HeapGraphEdge::kHidden,
@@ -2155,10 +2067,10 @@
}
-void HeapSnapshotGenerator::SetPropertyReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
- String* reference_name,
- Object* child_obj) {
+void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ String* reference_name,
+ Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
HeapGraphEdge::Type type = reference_name->length() > 0 ?
@@ -2174,7 +2086,7 @@
}
-void HeapSnapshotGenerator::SetPropertyShortcutReference(
+void V8HeapExplorer::SetPropertyShortcutReference(
HeapObject* parent_obj,
HeapEntry* parent_entry,
String* reference_name,
@@ -2191,52 +2103,221 @@
}
-void HeapSnapshotGenerator::SetRootGcRootsReference() {
- filler_->SetRootGcRootsReference();
+void V8HeapExplorer::SetRootGcRootsReference() {
+ filler_->SetIndexedAutoIndexReference(
+ HeapGraphEdge::kElement,
+ kInternalRootObject, snapshot_->root(),
+ kGcRootsObject, snapshot_->gc_roots());
}
-void HeapSnapshotGenerator::SetRootShortcutReference(Object* child_obj) {
+void V8HeapExplorer::SetRootShortcutReference(Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
ASSERT(child_entry != NULL);
- filler_->SetRootShortcutReference(child_obj, child_entry);
+ filler_->SetNamedAutoIndexReference(
+ HeapGraphEdge::kShortcut,
+ kInternalRootObject, snapshot_->root(),
+ child_obj, child_entry);
}
-void HeapSnapshotGenerator::SetGcRootsReference(Object* child_obj) {
+void V8HeapExplorer::SetGcRootsReference(Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
- filler_->SetStrongRootReference(child_obj, child_entry);
+ filler_->SetIndexedAutoIndexReference(
+ HeapGraphEdge::kElement,
+ kGcRootsObject, snapshot_->gc_roots(),
+ child_obj, child_entry);
}
}
+HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
+ v8::ActivityControl* control)
+ : snapshot_(snapshot),
+ control_(control),
+ v8_heap_explorer_(snapshot_, this) {
+}
+
+
+class SnapshotCounter : public SnapshotFillerInterface {
+ public:
+ SnapshotCounter(HeapEntriesAllocator* allocator, HeapEntriesMap* entries)
+ : allocator_(allocator), entries_(entries) { }
+ HeapEntry* AddEntry(HeapThing ptr) {
+ entries_->Pair(ptr, allocator_, HeapEntriesMap::kHeapEntryPlaceholder);
+ return HeapEntriesMap::kHeapEntryPlaceholder;
+ }
+ HeapEntry* FindOrAddEntry(HeapThing ptr) {
+ HeapEntry* entry = entries_->Map(ptr);
+ return entry != NULL ? entry : AddEntry(ptr);
+ }
+ void SetIndexedReference(HeapGraphEdge::Type,
+ HeapThing parent_ptr,
+ HeapEntry*,
+ int,
+ HeapThing child_ptr,
+ HeapEntry*) {
+ entries_->CountReference(parent_ptr, child_ptr);
+ }
+ void SetIndexedAutoIndexReference(HeapGraphEdge::Type,
+ HeapThing parent_ptr,
+ HeapEntry*,
+ HeapThing child_ptr,
+ HeapEntry*) {
+ entries_->CountReference(parent_ptr, child_ptr);
+ }
+ void SetNamedReference(HeapGraphEdge::Type,
+ HeapThing parent_ptr,
+ HeapEntry*,
+ const char*,
+ HeapThing child_ptr,
+ HeapEntry*) {
+ entries_->CountReference(parent_ptr, child_ptr);
+ }
+ void SetNamedAutoIndexReference(HeapGraphEdge::Type,
+ HeapThing parent_ptr,
+ HeapEntry*,
+ HeapThing child_ptr,
+ HeapEntry*) {
+ entries_->CountReference(parent_ptr, child_ptr);
+ }
+ private:
+ HeapEntriesAllocator* allocator_;
+ HeapEntriesMap* entries_;
+};
+
+
+class SnapshotFiller : public SnapshotFillerInterface {
+ public:
+ explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
+ : snapshot_(snapshot),
+ collection_(snapshot->collection()),
+ entries_(entries) { }
+ HeapEntry* AddEntry(HeapThing ptr) {
+ UNREACHABLE();
+ return NULL;
+ }
+ HeapEntry* FindOrAddEntry(HeapThing ptr) {
+ HeapEntry* entry = entries_->Map(ptr);
+ return entry != NULL ? entry : AddEntry(ptr);
+ }
+ void SetIndexedReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ int index,
+ HeapThing child_ptr,
+ HeapEntry* child_entry) {
+ int child_index, retainer_index;
+ entries_->CountReference(
+ parent_ptr, child_ptr, &child_index, &retainer_index);
+ parent_entry->SetIndexedReference(
+ type, child_index, index, child_entry, retainer_index);
+ }
+ void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ HeapThing child_ptr,
+ HeapEntry* child_entry) {
+ int child_index, retainer_index;
+ entries_->CountReference(
+ parent_ptr, child_ptr, &child_index, &retainer_index);
+ parent_entry->SetIndexedReference(
+ type, child_index, child_index + 1, child_entry, retainer_index);
+ }
+ void SetNamedReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ const char* reference_name,
+ HeapThing child_ptr,
+ HeapEntry* child_entry) {
+ int child_index, retainer_index;
+ entries_->CountReference(
+ parent_ptr, child_ptr, &child_index, &retainer_index);
+ parent_entry->SetNamedReference(
+ type, child_index, reference_name, child_entry, retainer_index);
+ }
+ void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ HeapThing child_ptr,
+ HeapEntry* child_entry) {
+ int child_index, retainer_index;
+ entries_->CountReference(
+ parent_ptr, child_ptr, &child_index, &retainer_index);
+ parent_entry->SetNamedReference(type,
+ child_index,
+ collection_->GetName(child_index + 1),
+ child_entry,
+ retainer_index);
+ }
+ private:
+ HeapSnapshot* snapshot_;
+ HeapSnapshotsCollection* collection_;
+ HeapEntriesMap* entries_;
+};
+
+
+bool HeapSnapshotGenerator::GenerateSnapshot() {
+ AssertNoAllocation no_alloc;
+
+ SetProgressTotal(4); // 2 passes + dominators + sizes.
+
+ // Pass 1. Iterate heap contents to count entries and references.
+ if (!CountEntriesAndReferences()) return false;
+
+ // Allocate and fill entries in the snapshot, allocate references.
+ snapshot_->AllocateEntries(entries_.entries_count(),
+ entries_.total_children_count(),
+ entries_.total_retainers_count());
+ entries_.AllocateEntries();
+
+ // Pass 2. Fill references.
+ if (!FillReferences()) return false;
+
+ if (!SetEntriesDominators()) return false;
+ if (!ApproximateRetainedSizes()) return false;
+
+ progress_counter_ = progress_total_;
+ if (!ProgressReport(true)) return false;
+ return true;
+}
+
+
+void HeapSnapshotGenerator::ProgressStep() {
+ ++progress_counter_;
+}
+
+
+bool HeapSnapshotGenerator::ProgressReport(bool force) {
+ const int kProgressReportGranularity = 10000;
+ if (control_ != NULL
+ && (force || progress_counter_ % kProgressReportGranularity == 0)) {
+ return
+ control_->ReportProgressValue(progress_counter_, progress_total_) ==
+ v8::ActivityControl::kContinue;
+ }
+ return true;
+}
+
+
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
if (control_ == NULL) return;
-
- HeapIterator iterator(HeapIterator::kFilterUnreachable);
- int objects_count = 0;
- for (HeapObject* obj = iterator.next();
- obj != NULL;
- obj = iterator.next(), ++objects_count) {}
- progress_total_ = objects_count * iterations_count;
+ progress_total_ = v8_heap_explorer_.EstimateObjectsCount() * iterations_count;
progress_counter_ = 0;
}
bool HeapSnapshotGenerator::CountEntriesAndReferences() {
- SnapshotCounter counter(&entries_);
- filler_ = &counter;
- filler_->AddEntry(HeapSnapshot::kInternalRootObject);
- filler_->AddEntry(HeapSnapshot::kGcRootsObject);
- return IterateAndExtractReferences();
+ SnapshotCounter counter(&v8_heap_explorer_, &entries_);
+ v8_heap_explorer_.AddRootEntries(&counter);
+ return v8_heap_explorer_.IterateAndExtractReferences(&counter);
}
bool HeapSnapshotGenerator::FillReferences() {
SnapshotFiller filler(snapshot_, &entries_);
- filler_ = &filler;
- return IterateAndExtractReferences();
+ return v8_heap_explorer_.IterateAndExtractReferences(&filler);
}
@@ -2322,7 +2403,7 @@
int remaining = entries_length - changed;
if (remaining < 0) remaining = 0;
progress_counter_ = base_progress_counter + remaining;
- if (!ReportProgress(true)) return false;
+ if (!ProgressReport(true)) return false;
}
return true;
}
@@ -2352,7 +2433,7 @@
}
for (int i = 0;
i < snapshot_->entries()->length();
- ++i, IncProgressCounter()) {
+ ++i, ProgressStep()) {
HeapEntry* entry = snapshot_->entries()->at(i);
int entry_size = entry->self_size();
for (HeapEntry* dominator = entry->dominator();
@@ -2360,32 +2441,12 @@
entry = dominator, dominator = entry->dominator()) {
dominator->add_retained_size(entry_size);
}
- if (!ReportProgress()) return false;
+ if (!ProgressReport()) return false;
}
return true;
}
-bool HeapSnapshotGenerator::IterateAndExtractReferences() {
- HeapIterator iterator(HeapIterator::kFilterUnreachable);
- bool interrupted = false;
- // Heap iteration with filtering must be finished in any case.
- for (HeapObject* obj = iterator.next();
- obj != NULL;
- obj = iterator.next(), IncProgressCounter()) {
- if (!interrupted) {
- ExtractReferences(obj);
- if (!ReportProgress()) interrupted = true;
- }
- }
- if (interrupted) return false;
- SetRootGcRootsReference();
- RootsReferencesExtractor extractor(this);
- Heap::IterateRoots(&extractor, VISIT_ALL);
- return ReportProgress();
-}
-
-
void HeapSnapshotsDiff::CreateRoots(int additions_count, int deletions_count) {
raw_additions_root_ =
NewArray<char>(HeapEntry::EntriesSize(1, additions_count, 0));
diff --git a/src/profile-generator.h b/src/profile-generator.h
index 748714d..4762eb6 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -681,14 +681,14 @@
void AllocateEntries(
int entries_count, int children_count, int retainers_count);
- HeapEntry* AddEntry(
- HeapObject* object, int children_count, int retainers_count);
HeapEntry* AddEntry(HeapEntry::Type type,
const char* name,
uint64_t id,
int size,
int children_count,
int retainers_count);
+ HeapEntry* AddRootEntry(int children_count);
+ HeapEntry* AddGcRootsEntry(int children_count, int retainers_count);
void ClearPaint();
HeapSnapshotsDiff* CompareWith(HeapSnapshot* snapshot);
HeapEntry* GetEntryById(uint64_t id);
@@ -701,15 +701,7 @@
void Print(int max_depth);
void PrintEntriesSize();
- static HeapObject* const kInternalRootObject;
- static HeapObject* const kGcRootsObject;
-
private:
- HeapEntry* AddEntry(HeapObject* object,
- HeapEntry::Type type,
- const char* name,
- int children_count,
- int retainers_count);
HeapEntry* GetNextEntryToInit();
HeapSnapshotsCollection* collection_;
@@ -873,6 +865,20 @@
};
+// A typedef for referencing anything that can be snapshotted living
+// in any kind of heap memory.
+typedef void* HeapThing;
+
+
+// An interface that creates HeapEntries by HeapThings.
+class HeapEntriesAllocator {
+ public:
+ virtual ~HeapEntriesAllocator() { }
+ virtual HeapEntry* AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count) = 0;
+};
+
+
// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
class HeapEntriesMap {
@@ -880,13 +886,12 @@
HeapEntriesMap();
~HeapEntriesMap();
- HeapEntry* Map(HeapObject* object);
- void Pair(HeapObject* object, HeapEntry* entry);
- void CountReference(HeapObject* from, HeapObject* to,
+ void AllocateEntries();
+ HeapEntry* Map(HeapThing thing);
+ void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry);
+ void CountReference(HeapThing from, HeapThing to,
int* prev_children_count = NULL,
int* prev_retainers_count = NULL);
- template<class Visitor>
- void UpdateEntries(Visitor* visitor);
int entries_count() { return entries_count_; }
int total_children_count() { return total_children_count_; }
@@ -896,18 +901,25 @@
private:
struct EntryInfo {
- explicit EntryInfo(HeapEntry* entry)
- : entry(entry), children_count(0), retainers_count(0) { }
+ EntryInfo(HeapEntry* entry, HeapEntriesAllocator* allocator)
+ : entry(entry),
+ allocator(allocator),
+ children_count(0),
+ retainers_count(0) {
+ }
HeapEntry* entry;
+ HeapEntriesAllocator* allocator;
int children_count;
int retainers_count;
};
- static uint32_t Hash(HeapObject* object) {
+ static uint32_t Hash(HeapThing thing) {
return ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)));
}
- static bool HeapObjectsMatch(void* key1, void* key2) { return key1 == key2; }
+ static bool HeapThingsMatch(HeapThing key1, HeapThing key2) {
+ return key1 == key2;
+ }
HashMap entries_;
int entries_count_;
@@ -934,52 +946,70 @@
};
-class HeapSnapshotGenerator {
+// An interface used to populate a snapshot with nodes and edges.
+class SnapshotFillerInterface {
public:
- class SnapshotFillerInterface {
- public:
- virtual ~SnapshotFillerInterface() { }
- virtual HeapEntry* AddEntry(HeapObject* obj) = 0;
- virtual void SetIndexedReference(HeapGraphEdge::Type type,
- HeapObject* parent_obj,
- HeapEntry* parent_entry,
- int index,
- Object* child_obj,
- HeapEntry* child_entry) = 0;
- virtual void SetNamedReference(HeapGraphEdge::Type type,
- HeapObject* parent_obj,
+ virtual ~SnapshotFillerInterface() { }
+ virtual HeapEntry* AddEntry(HeapThing ptr) = 0;
+ virtual HeapEntry* FindOrAddEntry(HeapThing ptr) = 0;
+ virtual void SetIndexedReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
HeapEntry* parent_entry,
- const char* reference_name,
- Object* child_obj,
+ int index,
+ HeapThing child_ptr,
HeapEntry* child_entry) = 0;
- virtual void SetRootGcRootsReference() = 0;
- virtual void SetRootShortcutReference(Object* child_obj,
+ virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ HeapThing child_ptr,
+ HeapEntry* child_entry) = 0;
+ virtual void SetNamedReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ const char* reference_name,
+ HeapThing child_ptr,
+ HeapEntry* child_entry) = 0;
+ virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
+ HeapThing parent_ptr,
+ HeapEntry* parent_entry,
+ HeapThing child_ptr,
HeapEntry* child_entry) = 0;
- virtual void SetStrongRootReference(Object* child_obj,
- HeapEntry* child_entry) = 0;
- };
+};
- HeapSnapshotGenerator(HeapSnapshot* snapshot,
- v8::ActivityControl* control);
- bool GenerateSnapshot();
+
+class SnapshottingProgressReportingInterface {
+ public:
+ virtual ~SnapshottingProgressReportingInterface() { }
+ virtual void ProgressStep() = 0;
+ virtual bool ProgressReport(bool force) = 0;
+};
+
+
+// An implementation of V8 heap graph extractor.
+class V8HeapExplorer : public HeapEntriesAllocator {
+ public:
+ V8HeapExplorer(HeapSnapshot* snapshot,
+ SnapshottingProgressReportingInterface* progress);
+ ~V8HeapExplorer();
+ virtual HeapEntry* AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count);
+ void AddRootEntries(SnapshotFillerInterface* filler);
+ int EstimateObjectsCount();
+ bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
private:
- bool ApproximateRetainedSizes();
- bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
- Vector<HeapEntry*>* dominators);
- bool CountEntriesAndReferences();
- HeapEntry* GetEntry(Object* obj);
- void IncProgressCounter() { ++progress_counter_; }
+ HeapEntry* AddEntry(
+ HeapObject* object, int children_count, int retainers_count);
+ HeapEntry* AddEntry(HeapObject* object,
+ HeapEntry::Type type,
+ const char* name,
+ int children_count,
+ int retainers_count);
void ExtractReferences(HeapObject* obj);
void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);
void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry);
- bool FillReferences();
- void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
- bool IterateAndExtractReferences();
- inline bool ReportProgress(bool force = false);
- bool SetEntriesDominators();
void SetClosureReference(HeapObject* parent_obj,
HeapEntry* parent,
String* reference_name,
@@ -1011,24 +1041,54 @@
void SetRootShortcutReference(Object* child);
void SetRootGcRootsReference();
void SetGcRootsReference(Object* child);
+
+ HeapEntry* GetEntry(Object* obj);
+
+ HeapSnapshot* snapshot_;
+ HeapSnapshotsCollection* collection_;
+ SnapshottingProgressReportingInterface* progress_;
+ // Used during references extraction to mark heap objects that
+ // are references via non-hidden properties.
+ HeapObjectsSet known_references_;
+ SnapshotFillerInterface* filler_;
+
+ static HeapObject* const kInternalRootObject;
+ static HeapObject* const kGcRootsObject;
+
+ friend class IndexedReferencesExtractor;
+ friend class RootsReferencesExtractor;
+
+ DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
+};
+
+
+class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
+ public:
+ HeapSnapshotGenerator(HeapSnapshot* snapshot,
+ v8::ActivityControl* control);
+ bool GenerateSnapshot();
+
+ private:
+ bool ApproximateRetainedSizes();
+ bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
+ Vector<HeapEntry*>* dominators);
+ bool CountEntriesAndReferences();
+ bool FillReferences();
+ void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
+ void ProgressStep();
+ bool ProgressReport(bool force = false);
+ bool SetEntriesDominators();
void SetProgressTotal(int iterations_count);
HeapSnapshot* snapshot_;
v8::ActivityControl* control_;
- HeapSnapshotsCollection* collection_;
- // Mapping from HeapObject* pointers to HeapEntry* pointers.
+ V8HeapExplorer v8_heap_explorer_;
+ // Mapping from HeapThing pointers to HeapEntry* pointers.
HeapEntriesMap entries_;
- SnapshotFillerInterface* filler_;
- // Used during references extraction to mark heap objects that
- // are references via non-hidden properties.
- HeapObjectsSet known_references_;
// Used during snapshot generation.
int progress_counter_;
int progress_total_;
- friend class IndexedReferencesExtractor;
- friend class RootsReferencesExtractor;
-
DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 3406cdc..df6471e 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -35,6 +35,7 @@
#include "deoptimizer.h"
#include "execution.h"
#include "global-handles.h"
+#include "mark-compact.h"
#include "scopeinfo.h"
#include "top.h"
@@ -100,11 +101,6 @@
// The ratio of ticks spent in JS code in percent.
static Atomic32 js_ratio;
-// The JSFunctions in the sampler window are not GC safe. Old-space
-// pointers are not cleared during mark-sweep collection and therefore
-// the window might contain stale pointers. The window is updated on
-// scavenges and (parts of it) cleared on mark-sweep and
-// mark-sweep-compact.
static Object* sampler_window[kSamplerWindowSize] = { NULL, };
static int sampler_window_position = 0;
static int sampler_window_weight[kSamplerWindowSize] = { 0, };
@@ -134,7 +130,6 @@
static bool IsOptimizable(JSFunction* function) {
- if (Heap::InNewSpace(function)) return false;
Code* code = function->code();
return code->kind() == Code::FUNCTION && code->optimizable();
}
@@ -208,16 +203,6 @@
}
-static void ClearSampleBufferNewSpaceEntries() {
- for (int i = 0; i < kSamplerWindowSize; i++) {
- if (Heap::InNewSpace(sampler_window[i])) {
- sampler_window[i] = NULL;
- sampler_window_weight[i] = 0;
- }
- }
-}
-
-
static int LookupSample(JSFunction* function) {
int weight = 0;
for (int i = 0; i < kSamplerWindowSize; i++) {
@@ -372,24 +357,6 @@
}
-void RuntimeProfiler::MarkCompactPrologue(bool is_compacting) {
- if (is_compacting) {
- // Clear all samples before mark-sweep-compact because every
- // function might move.
- ClearSampleBuffer();
- } else {
- // Clear only new space entries on mark-sweep since none of the
- // old-space functions will move.
- ClearSampleBufferNewSpaceEntries();
- }
-}
-
-
-bool IsEqual(void* first, void* second) {
- return first == second;
-}
-
-
void RuntimeProfiler::Setup() {
ClearSampleBuffer();
// If the ticker hasn't already started, make sure to do so to get
@@ -411,13 +378,41 @@
}
-Object** RuntimeProfiler::SamplerWindowAddress() {
- return sampler_window;
+int RuntimeProfiler::SamplerWindowSize() {
+ return kSamplerWindowSize;
}
-int RuntimeProfiler::SamplerWindowSize() {
- return kSamplerWindowSize;
+// Update the pointers in the sampler window after a GC.
+void RuntimeProfiler::UpdateSamplesAfterScavenge() {
+ for (int i = 0; i < kSamplerWindowSize; i++) {
+ Object* function = sampler_window[i];
+ if (function != NULL && Heap::InNewSpace(function)) {
+ MapWord map_word = HeapObject::cast(function)->map_word();
+ if (map_word.IsForwardingAddress()) {
+ sampler_window[i] = map_word.ToForwardingAddress();
+ } else {
+ sampler_window[i] = NULL;
+ }
+ }
+ }
+}
+
+
+void RuntimeProfiler::RemoveDeadSamples() {
+ for (int i = 0; i < kSamplerWindowSize; i++) {
+ Object* function = sampler_window[i];
+ if (function != NULL && !HeapObject::cast(function)->IsMarked()) {
+ sampler_window[i] = NULL;
+ }
+ }
+}
+
+
+void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
+ for (int i = 0; i < kSamplerWindowSize; i++) {
+ visitor->VisitPointer(&sampler_window[i]);
+ }
}
diff --git a/src/runtime-profiler.h b/src/runtime-profiler.h
index e041c05..02defc9 100644
--- a/src/runtime-profiler.h
+++ b/src/runtime-profiler.h
@@ -47,9 +47,10 @@
static void Reset();
static void TearDown();
- static void MarkCompactPrologue(bool is_compacting);
- static Object** SamplerWindowAddress();
static int SamplerWindowSize();
+ static void UpdateSamplesAfterScavenge();
+ static void RemoveDeadSamples();
+ static void UpdateSamplesAfterCompact(ObjectVisitor* visitor);
};
diff --git a/src/runtime.cc b/src/runtime.cc
index dce2e15..0c15f60 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -40,8 +40,10 @@
#include "debug.h"
#include "deoptimizer.h"
#include "execution.h"
+#include "global-handles.h"
#include "jsregexp.h"
#include "liveedit.h"
+#include "liveobjectlist-inl.h"
#include "parser.h"
#include "platform.h"
#include "runtime.h"
@@ -160,7 +162,8 @@
if (!maybe_result->ToObject(&result)) return maybe_result;
}
{ MaybeObject* maybe_result =
- copy->SetProperty(key_string, result, NONE);
+ // Creating object copy for literals. No strict mode needed.
+ copy->SetProperty(key_string, result, NONE, kNonStrictMode);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
}
@@ -546,7 +549,9 @@
// Assign the exception value to the catch variable and make sure
// that the catch variable is DontDelete.
{ MaybeObject* maybe_value =
- JSObject::cast(object)->SetProperty(key, value, DONT_DELETE);
+ // Passing non-strict per ECMA-262 5th Ed. 12.14. Catch, bullet #4.
+ JSObject::cast(object)->SetProperty(
+ key, value, DONT_DELETE, kNonStrictMode);
if (!maybe_value->ToObject(&value)) return maybe_value;
}
return object;
@@ -783,7 +788,8 @@
case JSObject::INTERCEPTED_ELEMENT:
case JSObject::FAST_ELEMENT: {
elms->set(IS_ACCESSOR_INDEX, Heap::false_value());
- elms->set(VALUE_INDEX, *GetElement(obj, index));
+ Handle<Object> value = GetElement(obj, index);
+ elms->set(VALUE_INDEX, *value);
elms->set(WRITABLE_INDEX, Heap::true_value());
elms->set(ENUMERABLE_INDEX, Heap::true_value());
elms->set(CONFIGURABLE_INDEX, Heap::true_value());
@@ -816,12 +822,14 @@
}
break;
}
- case NORMAL:
+ case NORMAL: {
// This is a data property.
elms->set(IS_ACCESSOR_INDEX, Heap::false_value());
- elms->set(VALUE_INDEX, *GetElement(obj, index));
+ Handle<Object> value = GetElement(obj, index);
+ elms->set(VALUE_INDEX, *value);
elms->set(WRITABLE_INDEX, Heap::ToBoolean(!details.IsReadOnly()));
break;
+ }
default:
UNREACHABLE();
break;
@@ -994,12 +1002,16 @@
static MaybeObject* Runtime_DeclareGlobals(Arguments args) {
+ ASSERT(args.length() == 4);
HandleScope scope;
Handle<GlobalObject> global = Handle<GlobalObject>(Top::context()->global());
Handle<Context> context = args.at<Context>(0);
CONVERT_ARG_CHECKED(FixedArray, pairs, 1);
bool is_eval = Smi::cast(args[2])->value() == 1;
+ StrictModeFlag strict_mode =
+ static_cast<StrictModeFlag>(Smi::cast(args[3])->value());
+ ASSERT(strict_mode == kStrictMode || strict_mode == kNonStrictMode);
// Compute the property attributes. According to ECMA-262, section
// 13, page 71, the property must be read-only and
@@ -1104,12 +1116,21 @@
// onload setter in those case and Safari does not. We follow
// Safari for compatibility.
if (value->IsJSFunction()) {
+ // Do not change DONT_DELETE to false from true.
+ if (lookup.IsProperty() && (lookup.type() != INTERCEPTOR)) {
+ attributes = static_cast<PropertyAttributes>(
+ attributes | (lookup.GetAttributes() & DONT_DELETE));
+ }
RETURN_IF_EMPTY_HANDLE(SetLocalPropertyIgnoreAttributes(global,
name,
value,
attributes));
} else {
- RETURN_IF_EMPTY_HANDLE(SetProperty(global, name, value, attributes));
+ RETURN_IF_EMPTY_HANDLE(SetProperty(global,
+ name,
+ value,
+ attributes,
+ strict_mode));
}
}
@@ -1170,7 +1191,8 @@
// Slow case: The property is not in the FixedArray part of the context.
Handle<JSObject> context_ext = Handle<JSObject>::cast(holder);
RETURN_IF_EMPTY_HANDLE(
- SetProperty(context_ext, name, initial_value, mode));
+ SetProperty(context_ext, name, initial_value,
+ mode, kNonStrictMode));
}
}
@@ -1211,7 +1233,8 @@
return ThrowRedeclarationError("const", name);
}
}
- RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, mode));
+ RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, mode,
+ kNonStrictMode));
}
return Heap::undefined_value();
@@ -1220,14 +1243,21 @@
static MaybeObject* Runtime_InitializeVarGlobal(Arguments args) {
NoHandleAllocation nha;
+ // args[0] == name
+ // args[1] == strict_mode
+ // args[2] == value (optional)
// Determine if we need to assign to the variable if it already
// exists (based on the number of arguments).
- RUNTIME_ASSERT(args.length() == 1 || args.length() == 2);
- bool assign = args.length() == 2;
+ RUNTIME_ASSERT(args.length() == 2 || args.length() == 3);
+ bool assign = args.length() == 3;
CONVERT_ARG_CHECKED(String, name, 0);
GlobalObject* global = Top::context()->global();
+ RUNTIME_ASSERT(args[1]->IsSmi());
+ StrictModeFlag strict_mode =
+ static_cast<StrictModeFlag>(Smi::cast(args[1])->value());
+ ASSERT(strict_mode == kStrictMode || strict_mode == kNonStrictMode);
// According to ECMA-262, section 12.2, page 62, the property must
// not be deletable.
@@ -1283,8 +1313,9 @@
}
// Assign the value (or undefined) to the property.
- Object* value = (assign) ? args[1] : Heap::undefined_value();
- return real_holder->SetProperty(&lookup, *name, value, attributes);
+ Object* value = (assign) ? args[2] : Heap::undefined_value();
+ return real_holder->SetProperty(
+ &lookup, *name, value, attributes, strict_mode);
}
Object* proto = real_holder->GetPrototype();
@@ -1298,7 +1329,9 @@
}
global = Top::context()->global();
- if (assign) return global->SetProperty(*name, args[1], attributes);
+ if (assign) {
+ return global->SetProperty(*name, args[2], attributes, strict_mode);
+ }
return Heap::undefined_value();
}
@@ -1357,13 +1390,19 @@
// BUG 1213575: Handle the case where we have to set a read-only
// property through an interceptor and only do it if it's
// uninitialized, e.g. the hole. Nirk...
- RETURN_IF_EMPTY_HANDLE(SetProperty(global, name, value, attributes));
+ // Passing non-strict mode because the property is writable.
+ RETURN_IF_EMPTY_HANDLE(SetProperty(global,
+ name,
+ value,
+ attributes,
+ kNonStrictMode));
return *value;
}
// Set the value, but only we're assigning the initial value to a
// constant. For now, we determine this by checking if the
// current value is the hole.
+ // Strict mode handling not needed (const disallowed in strict mode).
PropertyType type = lookup.type();
if (type == FIELD) {
FixedArray* properties = global->properties();
@@ -1439,7 +1478,9 @@
// context.
if (attributes == ABSENT) {
Handle<JSObject> global = Handle<JSObject>(Top::context()->global());
- RETURN_IF_EMPTY_HANDLE(SetProperty(global, name, value, NONE));
+ // Strict mode not needed (const disallowed in strict mode).
+ RETURN_IF_EMPTY_HANDLE(
+ SetProperty(global, name, value, NONE, kNonStrictMode));
return *value;
}
@@ -1476,8 +1517,9 @@
// The property was found in a different context extension object.
// Set it if it is not a read-only property.
if ((attributes & READ_ONLY) == 0) {
+ // Strict mode not needed (const disallowed in strict mode).
RETURN_IF_EMPTY_HANDLE(
- SetProperty(context_ext, name, value, attributes));
+ SetProperty(context_ext, name, value, attributes, kNonStrictMode));
}
}
@@ -1643,7 +1685,7 @@
code,
false);
optimized->shared()->DontAdaptArguments();
- SetProperty(holder, key, optimized, NONE);
+ SetProperty(holder, key, optimized, NONE, kStrictMode);
return optimized;
}
@@ -3739,7 +3781,8 @@
MaybeObject* Runtime::SetObjectProperty(Handle<Object> object,
Handle<Object> key,
Handle<Object> value,
- PropertyAttributes attr) {
+ PropertyAttributes attr,
+ StrictModeFlag strict) {
HandleScope scope;
if (object->IsUndefined() || object->IsNull()) {
@@ -3769,6 +3812,7 @@
return *value;
}
+ // TODO(1220): Implement SetElement strict mode.
Handle<Object> result = SetElement(js_object, index, value);
if (result.is_null()) return Failure::Exception();
return *value;
@@ -3781,7 +3825,7 @@
} else {
Handle<String> key_string = Handle<String>::cast(key);
key_string->TryFlatten();
- result = SetProperty(js_object, key_string, value, attr);
+ result = SetProperty(js_object, key_string, value, attr, strict);
}
if (result.is_null()) return Failure::Exception();
return *value;
@@ -3794,9 +3838,10 @@
Handle<String> name = Handle<String>::cast(converted);
if (name->AsArrayIndex(&index)) {
+ // TODO(1220): Implement SetElement strict mode.
return js_object->SetElement(index, *value);
} else {
- return js_object->SetProperty(*name, *value, attr);
+ return js_object->SetProperty(*name, *value, attr, strict);
}
}
@@ -3888,23 +3933,27 @@
static MaybeObject* Runtime_SetProperty(Arguments args) {
NoHandleAllocation ha;
- RUNTIME_ASSERT(args.length() == 3 || args.length() == 4);
+ RUNTIME_ASSERT(args.length() == 4 || args.length() == 5);
Handle<Object> object = args.at<Object>(0);
Handle<Object> key = args.at<Object>(1);
Handle<Object> value = args.at<Object>(2);
-
+ CONVERT_SMI_CHECKED(unchecked_attributes, args[3]);
+ RUNTIME_ASSERT(
+ (unchecked_attributes & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
// Compute attributes.
- PropertyAttributes attributes = NONE;
- if (args.length() == 4) {
- CONVERT_CHECKED(Smi, value_obj, args[3]);
- int unchecked_value = value_obj->value();
- // Only attribute bits should be set.
- RUNTIME_ASSERT(
- (unchecked_value & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
- attributes = static_cast<PropertyAttributes>(unchecked_value);
+ PropertyAttributes attributes =
+ static_cast<PropertyAttributes>(unchecked_attributes);
+
+ StrictModeFlag strict = kNonStrictMode;
+ if (args.length() == 5) {
+ CONVERT_SMI_CHECKED(strict_unchecked, args[4]);
+ RUNTIME_ASSERT(strict_unchecked == kStrictMode ||
+ strict_unchecked == kNonStrictMode);
+ strict = static_cast<StrictModeFlag>(strict_unchecked);
}
- return Runtime::SetObjectProperty(object, key, value, attributes);
+
+ return Runtime::SetObjectProperty(object, key, value, attributes, strict);
}
@@ -3938,7 +3987,7 @@
CONVERT_CHECKED(JSObject, object, args[0]);
CONVERT_CHECKED(String, key, args[1]);
CONVERT_SMI_CHECKED(strict, args[2]);
- return object->DeleteProperty(key, strict == kStrictMode
+ return object->DeleteProperty(key, (strict == kStrictMode)
? JSObject::STRICT_DELETION
: JSObject::NORMAL_DELETION);
}
@@ -7486,11 +7535,16 @@
static MaybeObject* Runtime_StoreContextSlot(Arguments args) {
HandleScope scope;
- ASSERT(args.length() == 3);
+ ASSERT(args.length() == 4);
Handle<Object> value(args[0]);
CONVERT_ARG_CHECKED(Context, context, 1);
CONVERT_ARG_CHECKED(String, name, 2);
+ CONVERT_SMI_CHECKED(strict_unchecked, args[3]);
+ RUNTIME_ASSERT(strict_unchecked == kStrictMode ||
+ strict_unchecked == kNonStrictMode);
+ StrictModeFlag strict = static_cast<StrictModeFlag>(strict_unchecked);
+
int index;
PropertyAttributes attributes;
@@ -7534,7 +7588,12 @@
// extension object itself.
if ((attributes & READ_ONLY) == 0 ||
(context_ext->GetLocalPropertyAttribute(*name) == ABSENT)) {
- RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, NONE));
+ RETURN_IF_EMPTY_HANDLE(SetProperty(context_ext, name, value, NONE, strict));
+ } else if (strict == kStrictMode && (attributes & READ_ONLY) != 0) {
+ // Setting read only property in strict mode.
+ Handle<Object> error =
+ Factory::NewTypeError("strict_cannot_assign", HandleVector(&name, 1));
+ return Top::Throw(*error);
}
return *value;
}
@@ -7863,12 +7922,9 @@
static ObjectPair Runtime_ResolvePossiblyDirectEval(Arguments args) {
ASSERT(args.length() == 4);
- if (!args[0]->IsJSFunction()) {
- return MakePair(Top::ThrowIllegalOperation(), NULL);
- }
HandleScope scope;
- Handle<JSFunction> callee = args.at<JSFunction>(0);
+ Handle<Object> callee = args.at<Object>(0);
Handle<Object> receiver; // Will be overwritten.
// Compute the calling context.
@@ -7936,12 +7992,9 @@
static ObjectPair Runtime_ResolvePossiblyDirectEvalNoLookup(Arguments args) {
ASSERT(args.length() == 4);
- if (!args[0]->IsJSFunction()) {
- return MakePair(Top::ThrowIllegalOperation(), NULL);
- }
HandleScope scope;
- Handle<JSFunction> callee = args.at<JSFunction>(0);
+ Handle<Object> callee = args.at<Object>(0);
// 'eval' is bound in the global context, but it may have been overwritten.
// Compare it to the builtin 'GlobalEval' function to make sure.
@@ -8033,10 +8086,14 @@
public:
ArrayConcatVisitor(Handle<FixedArray> storage,
bool fast_elements) :
- storage_(storage),
+ storage_(Handle<FixedArray>::cast(GlobalHandles::Create(*storage))),
index_offset_(0u),
fast_elements_(fast_elements) { }
+ ~ArrayConcatVisitor() {
+ clear_storage();
+ }
+
void visit(uint32_t i, Handle<Object> elm) {
if (i >= JSObject::kMaxElementCount - index_offset_) return;
uint32_t index = index_offset_ + i;
@@ -8054,11 +8111,13 @@
// Fall-through to dictionary mode.
}
ASSERT(!fast_elements_);
- Handle<NumberDictionary> dict(storage_.cast<NumberDictionary>());
+ Handle<NumberDictionary> dict(NumberDictionary::cast(*storage_));
Handle<NumberDictionary> result =
Factory::DictionaryAtNumberPut(dict, index, elm);
if (!result.is_identical_to(dict)) {
- storage_ = Handle<FixedArray>::cast(result);
+ // Dictionary needed to grow.
+ clear_storage();
+ set_storage(*result);
}
}
@@ -8090,23 +8149,35 @@
// Convert storage to dictionary mode.
void SetDictionaryMode(uint32_t index) {
ASSERT(fast_elements_);
- Handle<FixedArray> current_storage(storage_.ToHandle());
- HandleCell<NumberDictionary> slow_storage(
+ Handle<FixedArray> current_storage(*storage_);
+ Handle<NumberDictionary> slow_storage(
Factory::NewNumberDictionary(current_storage->length()));
uint32_t current_length = static_cast<uint32_t>(current_storage->length());
for (uint32_t i = 0; i < current_length; i++) {
HandleScope loop_scope;
Handle<Object> element(current_storage->get(i));
if (!element->IsTheHole()) {
- slow_storage =
- Factory::DictionaryAtNumberPut(slow_storage.ToHandle(), i, element);
+ Handle<NumberDictionary> new_storage =
+ Factory::DictionaryAtNumberPut(slow_storage, i, element);
+ if (!new_storage.is_identical_to(slow_storage)) {
+ slow_storage = loop_scope.CloseAndEscape(new_storage);
+ }
}
}
- storage_ = slow_storage.cast<FixedArray>();
+ clear_storage();
+ set_storage(*slow_storage);
fast_elements_ = false;
}
- HandleCell<FixedArray> storage_;
+ inline void clear_storage() {
+ GlobalHandles::Destroy(Handle<Object>::cast(storage_).location());
+ }
+
+ inline void set_storage(FixedArray* storage) {
+ storage_ = Handle<FixedArray>::cast(GlobalHandles::Create(storage));
+ }
+
+ Handle<FixedArray> storage_; // Always a global handle.
// Index after last seen index. Always less than or equal to
// JSObject::kMaxElementCount.
uint32_t index_offset_;
@@ -9267,7 +9338,9 @@
RETURN_IF_EMPTY_HANDLE_VALUE(
SetProperty(scope_object,
scope_info.context_slot_name(i),
- Handle<Object>(context->get(context_index)), NONE),
+ Handle<Object>(context->get(context_index)),
+ NONE,
+ kNonStrictMode),
false);
}
}
@@ -9293,7 +9366,9 @@
RETURN_IF_EMPTY_HANDLE_VALUE(
SetProperty(local_scope,
scope_info.parameter_name(i),
- Handle<Object>(frame->GetParameter(i)), NONE),
+ Handle<Object>(frame->GetParameter(i)),
+ NONE,
+ kNonStrictMode),
Handle<JSObject>());
}
@@ -9302,7 +9377,9 @@
RETURN_IF_EMPTY_HANDLE_VALUE(
SetProperty(local_scope,
scope_info.stack_slot_name(i),
- Handle<Object>(frame->GetExpression(i)), NONE),
+ Handle<Object>(frame->GetExpression(i)),
+ NONE,
+ kNonStrictMode),
Handle<JSObject>());
}
@@ -9326,7 +9403,11 @@
ASSERT(keys->get(i)->IsString());
Handle<String> key(String::cast(keys->get(i)));
RETURN_IF_EMPTY_HANDLE_VALUE(
- SetProperty(local_scope, key, GetProperty(ext, key), NONE),
+ SetProperty(local_scope,
+ key,
+ GetProperty(ext, key),
+ NONE,
+ kNonStrictMode),
Handle<JSObject>());
}
}
@@ -9364,7 +9445,8 @@
SetProperty(closure_scope,
scope_info.parameter_name(i),
Handle<Object>(element),
- NONE),
+ NONE,
+ kNonStrictMode),
Handle<JSObject>());
}
}
@@ -9385,7 +9467,11 @@
ASSERT(keys->get(i)->IsString());
Handle<String> key(String::cast(keys->get(i)));
RETURN_IF_EMPTY_HANDLE_VALUE(
- SetProperty(closure_scope, key, GetProperty(ext, key), NONE),
+ SetProperty(closure_scope,
+ key,
+ GetProperty(ext, key),
+ NONE,
+ kNonStrictMode),
Handle<JSObject>());
}
}
@@ -10863,6 +10949,207 @@
}
return Smi::FromInt(usage);
}
+
+
+// Returns whether LiveObjectList support is compiled in.
+static MaybeObject* Runtime_HasLOLEnabled(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ return Heap::true_value();
+#else
+ return Heap::false_value();
+#endif
+}
+
+
+// Captures a live object list from the present heap.
+static MaybeObject* Runtime_CaptureLOL(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ return LiveObjectList::Capture();
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Deletes the specified live object list.
+static MaybeObject* Runtime_DeleteLOL(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ CONVERT_SMI_CHECKED(id, args[0]);
+ bool success = LiveObjectList::Delete(id);
+ return success ? Heap::true_value() : Heap::false_value();
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Generates the response to a debugger request for a dump of the objects
+// contained in the difference between the captured live object lists
+// specified by id1 and id2.
+// If id1 is 0 (i.e. not a valid lol), then the whole of lol id2 will be
+// dumped.
+static MaybeObject* Runtime_DumpLOL(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ HandleScope scope;
+ CONVERT_SMI_CHECKED(id1, args[0]);
+ CONVERT_SMI_CHECKED(id2, args[1]);
+ CONVERT_SMI_CHECKED(start, args[2]);
+ CONVERT_SMI_CHECKED(count, args[3]);
+ CONVERT_ARG_CHECKED(JSObject, filter_obj, 4);
+ EnterDebugger enter_debugger;
+ return LiveObjectList::Dump(id1, id2, start, count, filter_obj);
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Gets the specified object as requested by the debugger.
+// This is only used for obj ids shown in live object lists.
+static MaybeObject* Runtime_GetLOLObj(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ CONVERT_SMI_CHECKED(obj_id, args[0]);
+ Object* result = LiveObjectList::GetObj(obj_id);
+ return result;
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Gets the obj id for the specified address if valid.
+// This is only used for obj ids shown in live object lists.
+static MaybeObject* Runtime_GetLOLObjId(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ HandleScope scope;
+ CONVERT_ARG_CHECKED(String, address, 0);
+ Object* result = LiveObjectList::GetObjId(address);
+ return result;
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Gets the retainers that keep the specified object alive.
+static MaybeObject* Runtime_GetLOLObjRetainers(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ HandleScope scope;
+ CONVERT_SMI_CHECKED(obj_id, args[0]);
+ RUNTIME_ASSERT(args[1]->IsUndefined() || args[1]->IsJSObject());
+ RUNTIME_ASSERT(args[2]->IsUndefined() || args[2]->IsBoolean());
+ RUNTIME_ASSERT(args[3]->IsUndefined() || args[3]->IsSmi());
+ RUNTIME_ASSERT(args[4]->IsUndefined() || args[4]->IsSmi());
+ CONVERT_ARG_CHECKED(JSObject, filter_obj, 5);
+
+ Handle<JSObject> instance_filter;
+ if (args[1]->IsJSObject()) {
+ instance_filter = args.at<JSObject>(1);
+ }
+ bool verbose = false;
+ if (args[2]->IsBoolean()) {
+ verbose = args[2]->IsTrue();
+ }
+ int start = 0;
+ if (args[3]->IsSmi()) {
+ start = Smi::cast(args[3])->value();
+ }
+ int limit = Smi::kMaxValue;
+ if (args[4]->IsSmi()) {
+ limit = Smi::cast(args[4])->value();
+ }
+
+ return LiveObjectList::GetObjRetainers(obj_id,
+ instance_filter,
+ verbose,
+ start,
+ limit,
+ filter_obj);
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Gets the reference path between 2 objects.
+static MaybeObject* Runtime_GetLOLPath(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ HandleScope scope;
+ CONVERT_SMI_CHECKED(obj_id1, args[0]);
+ CONVERT_SMI_CHECKED(obj_id2, args[1]);
+ RUNTIME_ASSERT(args[2]->IsUndefined() || args[2]->IsJSObject());
+
+ Handle<JSObject> instance_filter;
+ if (args[2]->IsJSObject()) {
+ instance_filter = args.at<JSObject>(2);
+ }
+
+ Object* result =
+ LiveObjectList::GetPath(obj_id1, obj_id2, instance_filter);
+ return result;
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Generates the response to a debugger request for a list of all
+// previously captured live object lists.
+static MaybeObject* Runtime_InfoLOL(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ CONVERT_SMI_CHECKED(start, args[0]);
+ CONVERT_SMI_CHECKED(count, args[1]);
+ return LiveObjectList::Info(start, count);
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Gets a dump of the specified object as requested by the debugger.
+// This is only used for obj ids shown in live object lists.
+static MaybeObject* Runtime_PrintLOLObj(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ HandleScope scope;
+ CONVERT_SMI_CHECKED(obj_id, args[0]);
+ Object* result = LiveObjectList::PrintObj(obj_id);
+ return result;
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Resets and releases all previously captured live object lists.
+static MaybeObject* Runtime_ResetLOL(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ LiveObjectList::Reset();
+ return Heap::undefined_value();
+#else
+ return Heap::undefined_value();
+#endif
+}
+
+
+// Generates the response to a debugger request for a summary of the types
+// of objects in the difference between the captured live object lists
+// specified by id1 and id2.
+// If id1 is 0 (i.e. not a valid lol), then the whole of lol id2 will be
+// summarized.
+static MaybeObject* Runtime_SummarizeLOL(Arguments args) {
+#ifdef LIVE_OBJECT_LIST
+ HandleScope scope;
+ CONVERT_SMI_CHECKED(id1, args[0]);
+ CONVERT_SMI_CHECKED(id2, args[1]);
+ CONVERT_ARG_CHECKED(JSObject, filter_obj, 2);
+
+ EnterDebugger enter_debugger;
+ return LiveObjectList::Summarize(id1, id2, filter_obj);
+#else
+ return Heap::undefined_value();
+#endif
+}
+
#endif // ENABLE_DEBUGGER_SUPPORT
diff --git a/src/runtime.h b/src/runtime.h
index 06437ef..9dd6eda 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -241,7 +241,7 @@
F(ResolvePossiblyDirectEval, 4, 2) \
F(ResolvePossiblyDirectEvalNoLookup, 4, 2) \
\
- F(SetProperty, -1 /* 3 or 4 */, 1) \
+ F(SetProperty, -1 /* 4 or 5 */, 1) \
F(DefineOrRedefineDataProperty, 4, 1) \
F(DefineOrRedefineAccessorProperty, 5, 1) \
F(IgnoreAttributesAndSetProperty, -1 /* 3 or 4 */, 1) \
@@ -288,12 +288,12 @@
F(DeleteContextSlot, 2, 1) \
F(LoadContextSlot, 2, 2) \
F(LoadContextSlotNoReferenceError, 2, 2) \
- F(StoreContextSlot, 3, 1) \
+ F(StoreContextSlot, 4, 1) \
\
/* Declarations and initialization */ \
- F(DeclareGlobals, 3, 1) \
+ F(DeclareGlobals, 4, 1) \
F(DeclareContextSlot, 4, 1) \
- F(InitializeVarGlobal, -1 /* 1 or 2 */, 1) \
+ F(InitializeVarGlobal, -1 /* 2 or 3 */, 1) \
F(InitializeConstGlobal, 2, 1) \
F(InitializeConstContextSlot, 3, 1) \
F(OptimizeObjectForAddingMultipleProperties, 2, 1) \
@@ -376,7 +376,21 @@
\
F(SetFlags, 1, 1) \
F(CollectGarbage, 1, 1) \
- F(GetHeapUsage, 0, 1)
+ F(GetHeapUsage, 0, 1) \
+ \
+ /* LiveObjectList support */ \
+ F(HasLOLEnabled, 0, 1) \
+ F(CaptureLOL, 0, 1) \
+ F(DeleteLOL, 1, 1) \
+ F(DumpLOL, 5, 1) \
+ F(GetLOLObj, 1, 1) \
+ F(GetLOLObjId, 1, 1) \
+ F(GetLOLObjRetainers, 6, 1) \
+ F(GetLOLPath, 3, 1) \
+ F(InfoLOL, 2, 1) \
+ F(PrintLOLObj, 1, 1) \
+ F(ResetLOL, 0, 1) \
+ F(SummarizeLOL, 3, 1)
#else
#define RUNTIME_FUNCTION_LIST_DEBUGGER_SUPPORT(F)
@@ -538,7 +552,8 @@
Handle<Object> object,
Handle<Object> key,
Handle<Object> value,
- PropertyAttributes attr);
+ PropertyAttributes attr,
+ StrictModeFlag strict);
MUST_USE_RESULT static MaybeObject* ForceSetObjectProperty(
Handle<JSObject> object,
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index abb26d6..360f0b7 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -498,13 +498,13 @@
JSObject* receiver,
int field_index,
Map* transition,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION;
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, type, extra_ic_state);
+ Code::STORE_IC, type, strict_mode);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler(extra_ic_state);
+ StoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code =
compiler.CompileStoreField(receiver, field_index, transition, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -521,13 +521,15 @@
}
-MaybeObject* StubCache::ComputeKeyedStoreSpecialized(JSObject* receiver) {
+MaybeObject* StubCache::ComputeKeyedStoreSpecialized(
+ JSObject* receiver,
+ StrictModeFlag strict_mode) {
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL);
+ Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL, strict_mode);
String* name = Heap::KeyedStoreSpecialized_symbol();
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- KeyedStoreStubCompiler compiler;
+ KeyedStoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code = compiler.CompileStoreSpecialized(receiver);
if (!maybe_code->ToObject(&code)) return maybe_code;
}
@@ -542,7 +544,9 @@
}
-MaybeObject* StubCache::ComputeKeyedStorePixelArray(JSObject* receiver) {
+MaybeObject* StubCache::ComputeKeyedStorePixelArray(
+ JSObject* receiver,
+ StrictModeFlag strict_mode) {
// Using NORMAL as the PropertyType for array element stores is a misuse. The
// generated stub always accesses fast elements, not slow-mode fields, but
// some property type is required for the stub lookup. Note that overloading
@@ -550,11 +554,11 @@
// other keyed field stores. This is guaranteed to be the case since all field
// keyed stores that are not array elements go through a generic builtin stub.
Code::Flags flags =
- Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL);
+ Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL, strict_mode);
String* name = Heap::KeyedStorePixelArray_symbol();
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- KeyedStoreStubCompiler compiler;
+ KeyedStoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code = compiler.CompileStorePixelArray(receiver);
if (!maybe_code->ToObject(&code)) return maybe_code;
}
@@ -598,11 +602,13 @@
MaybeObject* StubCache::ComputeKeyedLoadOrStoreExternalArray(
JSObject* receiver,
- bool is_store) {
+ bool is_store,
+ StrictModeFlag strict_mode) {
Code::Flags flags =
Code::ComputeMonomorphicFlags(
is_store ? Code::KEYED_STORE_IC : Code::KEYED_LOAD_IC,
- NORMAL);
+ NORMAL,
+ strict_mode);
ExternalArrayType array_type =
ElementsKindToExternalArrayType(receiver->GetElementsKind());
String* name =
@@ -615,9 +621,9 @@
Object* code = map->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
ExternalArrayStubCompiler compiler;
- { MaybeObject* maybe_code =
- is_store ? compiler.CompileKeyedStoreStub(array_type, flags) :
- compiler.CompileKeyedLoadStub(array_type, flags);
+ { MaybeObject* maybe_code = is_store
+ ? compiler.CompileKeyedStoreStub(array_type, flags)
+ : compiler.CompileKeyedLoadStub(array_type, flags);
if (!maybe_code->ToObject(&code)) return maybe_code;
}
if (is_store) {
@@ -637,8 +643,8 @@
}
-MaybeObject* StubCache::ComputeStoreNormal(Code::ExtraICState extra_ic_state) {
- return Builtins::builtin(extra_ic_state == StoreIC::kStoreICStrict
+MaybeObject* StubCache::ComputeStoreNormal(StrictModeFlag strict_mode) {
+ return Builtins::builtin((strict_mode == kStrictMode)
? Builtins::StoreIC_Normal_Strict
: Builtins::StoreIC_Normal);
}
@@ -647,12 +653,12 @@
MaybeObject* StubCache::ComputeStoreGlobal(String* name,
GlobalObject* receiver,
JSGlobalPropertyCell* cell,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, NORMAL, extra_ic_state);
+ Code::STORE_IC, NORMAL, strict_mode);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler(extra_ic_state);
+ StoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code =
compiler.CompileStoreGlobal(receiver, cell, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -673,13 +679,13 @@
String* name,
JSObject* receiver,
AccessorInfo* callback,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
ASSERT(v8::ToCData<Address>(callback->setter()) != 0);
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, CALLBACKS, extra_ic_state);
+ Code::STORE_IC, CALLBACKS, strict_mode);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler(extra_ic_state);
+ StoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code =
compiler.CompileStoreCallback(receiver, callback, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -699,12 +705,12 @@
MaybeObject* StubCache::ComputeStoreInterceptor(
String* name,
JSObject* receiver,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
Code::Flags flags = Code::ComputeMonomorphicFlags(
- Code::STORE_IC, INTERCEPTOR, extra_ic_state);
+ Code::STORE_IC, INTERCEPTOR, strict_mode);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- StoreStubCompiler compiler(extra_ic_state);
+ StoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code =
compiler.CompileStoreInterceptor(receiver, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -724,12 +730,14 @@
MaybeObject* StubCache::ComputeKeyedStoreField(String* name,
JSObject* receiver,
int field_index,
- Map* transition) {
+ Map* transition,
+ StrictModeFlag strict_mode) {
PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION;
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, type);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::KEYED_STORE_IC, type, strict_mode);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- KeyedStoreStubCompiler compiler;
+ KeyedStoreStubCompiler compiler(strict_mode);
{ MaybeObject* maybe_code =
compiler.CompileStoreField(receiver, field_index, transition, name);
if (!maybe_code->ToObject(&code)) return maybe_code;
@@ -1417,12 +1425,17 @@
MaybeObject* StoreInterceptorProperty(Arguments args) {
+ ASSERT(args.length() == 4);
JSObject* recv = JSObject::cast(args[0]);
String* name = String::cast(args[1]);
Object* value = args[2];
+ StrictModeFlag strict =
+ static_cast<StrictModeFlag>(Smi::cast(args[3])->value());
+ ASSERT(strict == kStrictMode || strict == kNonStrictMode);
ASSERT(recv->HasNamedInterceptor());
PropertyAttributes attr = NONE;
- MaybeObject* result = recv->SetPropertyWithInterceptor(name, value, attr);
+ MaybeObject* result = recv->SetPropertyWithInterceptor(
+ name, value, attr, strict);
return result;
}
@@ -1675,8 +1688,8 @@
MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type,
- extra_ic_state_);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, type, strict_mode_);
MaybeObject* result = GetCodeWithFlags(flags, name);
if (!result->IsFailure()) {
PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG,
@@ -1691,7 +1704,8 @@
MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type, String* name) {
- Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, type);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::KEYED_STORE_IC, type, strict_mode_);
MaybeObject* result = GetCodeWithFlags(flags, name);
if (!result->IsFailure()) {
PROFILE(CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 4638da2..6927076 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -143,27 +143,27 @@
JSObject* receiver,
int field_index,
Map* transition,
- Code::ExtraICState extra_ic_state);
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeStoreNormal(
- Code::ExtraICState extra_ic_state);
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeStoreGlobal(
String* name,
GlobalObject* receiver,
JSGlobalPropertyCell* cell,
- Code::ExtraICState extra_ic_state);
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeStoreCallback(
String* name,
JSObject* receiver,
AccessorInfo* callback,
- Code::ExtraICState extra_ic_state);
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeStoreInterceptor(
String* name,
JSObject* receiver,
- Code::ExtraICState extra_ic_state);
+ StrictModeFlag strict_mode);
// ---
@@ -171,17 +171,21 @@
String* name,
JSObject* receiver,
int field_index,
- Map* transition = NULL);
+ Map* transition,
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeKeyedStoreSpecialized(
- JSObject* receiver);
+ JSObject* receiver,
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeKeyedStorePixelArray(
- JSObject* receiver);
+ JSObject* receiver,
+ StrictModeFlag strict_mode);
MUST_USE_RESULT static MaybeObject* ComputeKeyedLoadOrStoreExternalArray(
JSObject* receiver,
- bool is_store);
+ bool is_store,
+ StrictModeFlag strict_mode);
// ---
@@ -628,8 +632,8 @@
class StoreStubCompiler: public StubCompiler {
public:
- explicit StoreStubCompiler(Code::ExtraICState extra_ic_state)
- : extra_ic_state_(extra_ic_state) { }
+ explicit StoreStubCompiler(StrictModeFlag strict_mode)
+ : strict_mode_(strict_mode) { }
MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object,
int index,
@@ -649,12 +653,15 @@
private:
MaybeObject* GetCode(PropertyType type, String* name);
- Code::ExtraICState extra_ic_state_;
+ StrictModeFlag strict_mode_;
};
class KeyedStoreStubCompiler: public StubCompiler {
public:
+ explicit KeyedStoreStubCompiler(StrictModeFlag strict_mode)
+ : strict_mode_(strict_mode) { }
+
MUST_USE_RESULT MaybeObject* CompileStoreField(JSObject* object,
int index,
Map* transition,
@@ -666,6 +673,8 @@
private:
MaybeObject* GetCode(PropertyType type, String* name);
+
+ StrictModeFlag strict_mode_;
};
diff --git a/src/version.cc b/src/version.cc
index 2475fb2..391addc 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 1
-#define BUILD_NUMBER 7
+#define BUILD_NUMBER 8
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
diff --git a/src/virtual-frame-heavy-inl.h b/src/virtual-frame-heavy-inl.h
index ac844b4..cf12eca 100644
--- a/src/virtual-frame-heavy-inl.h
+++ b/src/virtual-frame-heavy-inl.h
@@ -87,14 +87,6 @@
}
-void VirtualFrame::Push(Handle<Object> value) {
- ASSERT(!ConstantPoolOverflowed());
- FrameElement element =
- FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED);
- elements_.Add(element);
-}
-
-
bool VirtualFrame::Equals(VirtualFrame* other) {
#ifdef DEBUG
for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 6cfeed3..42373e3 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2017,8 +2017,8 @@
__ AbortIfSmi(rax);
}
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
- __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &slow);
// Operand is a float, negate its value by flipping sign bit.
__ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
@@ -2047,8 +2047,8 @@
}
// Check if the operand is a heap number.
- __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
- __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &slow);
// Convert the heap number in rax to an untagged integer in rcx.
@@ -2081,6 +2081,157 @@
}
+void MathPowStub::Generate(MacroAssembler* masm) {
+ // Registers are used as follows:
+ // rdx = base
+ // rax = exponent
+ // rcx = temporary, result
+
+ Label allocate_return, call_runtime;
+
+ // Load input parameters.
+ __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+ __ movq(rax, Operand(rsp, 1 * kPointerSize));
+
+ // Save 1 in xmm3 - we need this several times later on.
+ __ movl(rcx, Immediate(1));
+ __ cvtlsi2sd(xmm3, rcx);
+
+ Label exponent_nonsmi;
+ Label base_nonsmi;
+ // If the exponent is a heap number go to that specific case.
+ __ JumpIfNotSmi(rax, &exponent_nonsmi);
+ __ JumpIfNotSmi(rdx, &base_nonsmi);
+
+ // Optimized version when both exponent and base are smis.
+ Label powi;
+ __ SmiToInteger32(rdx, rdx);
+ __ cvtlsi2sd(xmm0, rdx);
+ __ jmp(&powi);
+ // Exponent is a smi and base is a heapnumber.
+ __ bind(&base_nonsmi);
+ __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+
+ __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+
+ // Optimized version of pow if exponent is a smi.
+ // xmm0 contains the base.
+ __ bind(&powi);
+ __ SmiToInteger32(rax, rax);
+
+ // Save exponent in base as we need to check if exponent is negative later.
+ // We know that base and exponent are in different registers.
+ __ movq(rdx, rax);
+
+ // Get absolute value of exponent.
+ NearLabel no_neg;
+ __ cmpl(rax, Immediate(0));
+ __ j(greater_equal, &no_neg);
+ __ negl(rax);
+ __ bind(&no_neg);
+
+ // Load xmm1 with 1.
+ __ movsd(xmm1, xmm3);
+ NearLabel while_true;
+ NearLabel no_multiply;
+
+ __ bind(&while_true);
+ __ shrl(rax, Immediate(1));
+ __ j(not_carry, &no_multiply);
+ __ mulsd(xmm1, xmm0);
+ __ bind(&no_multiply);
+ __ mulsd(xmm0, xmm0);
+ __ j(not_zero, &while_true);
+
+ // Base has the original value of the exponent - if the exponent is
+ // negative return 1/result.
+ __ testl(rdx, rdx);
+ __ j(positive, &allocate_return);
+ // Special case if xmm1 has reached infinity.
+ __ divsd(xmm3, xmm1);
+ __ movsd(xmm1, xmm3);
+ __ xorpd(xmm0, xmm0);
+ __ ucomisd(xmm0, xmm1);
+ __ j(equal, &call_runtime);
+
+ __ jmp(&allocate_return);
+
+ // Exponent (or both) is a heapnumber - no matter what we should now work
+ // on doubles.
+ __ bind(&exponent_nonsmi);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+ __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
+ // Test if exponent is nan.
+ __ ucomisd(xmm1, xmm1);
+ __ j(parity_even, &call_runtime);
+
+ NearLabel base_not_smi;
+ NearLabel handle_special_cases;
+ __ JumpIfNotSmi(rdx, &base_not_smi);
+ __ SmiToInteger32(rdx, rdx);
+ __ cvtlsi2sd(xmm0, rdx);
+ __ jmp(&handle_special_cases);
+
+ __ bind(&base_not_smi);
+ __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+ __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset));
+ __ andl(rcx, Immediate(HeapNumber::kExponentMask));
+ __ cmpl(rcx, Immediate(HeapNumber::kExponentMask));
+ // base is NaN or +/-Infinity
+ __ j(greater_equal, &call_runtime);
+ __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+
+ // base is in xmm0 and exponent is in xmm1.
+ __ bind(&handle_special_cases);
+ NearLabel not_minus_half;
+ // Test for -0.5.
+ // Load xmm2 with -0.5.
+ __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE);
+ __ movq(xmm2, rcx);
+ // xmm2 now has -0.5.
+ __ ucomisd(xmm2, xmm1);
+ __ j(not_equal, ¬_minus_half);
+
+ // Calculates reciprocal of square root.
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorpd(xmm1, xmm1);
+ __ addsd(xmm1, xmm0);
+ __ sqrtsd(xmm1, xmm1);
+ __ divsd(xmm3, xmm1);
+ __ movsd(xmm1, xmm3);
+ __ jmp(&allocate_return);
+
+ // Test for 0.5.
+ __ bind(¬_minus_half);
+ // Load xmm2 with 0.5.
+ // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
+ __ addsd(xmm2, xmm3);
+ // xmm2 now has 0.5.
+ __ ucomisd(xmm2, xmm1);
+ __ j(not_equal, &call_runtime);
+ // Calculates square root.
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorpd(xmm1, xmm1);
+ __ addsd(xmm1, xmm0);
+ __ sqrtsd(xmm1, xmm1);
+
+ __ bind(&allocate_return);
+ __ AllocateHeapNumber(rcx, rax, &call_runtime);
+ __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1);
+ __ movq(rax, rcx);
+ __ ret(2 * kPointerSize);
+
+ __ bind(&call_runtime);
+ __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+}
+
+
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// The key is in rdx and the parameter count is in rax.
@@ -4682,6 +4833,61 @@
__ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
+
+void StringCharAtStub::Generate(MacroAssembler* masm) {
+ // Expects two arguments (object, index) on the stack:
+
+ // Stack frame on entry.
+ // rsp[0]: return address
+ // rsp[8]: index
+ // rsp[16]: object
+
+ Register object = rbx;
+ Register index = rax;
+ Register scratch1 = rcx;
+ Register scratch2 = rdx;
+ Register result = rax;
+
+ __ pop(scratch1); // Return address.
+ __ pop(index);
+ __ pop(object);
+ __ push(scratch1);
+
+ Label need_conversion;
+ Label index_out_of_range;
+ Label done;
+ StringCharAtGenerator generator(object,
+ index,
+ scratch1,
+ scratch2,
+ result,
+ &need_conversion,
+ &need_conversion,
+ &index_out_of_range,
+ STRING_INDEX_IS_NUMBER);
+ generator.GenerateFast(masm);
+ __ jmp(&done);
+
+ __ bind(&index_out_of_range);
+ // When the index is out of range, the spec requires us to return
+ // the empty string.
+ __ Move(result, Factory::empty_string());
+ __ jmp(&done);
+
+ __ bind(&need_conversion);
+ // Move smi zero into the result register, which will trigger
+ // conversion.
+ __ Move(result, Smi::FromInt(0));
+ __ jmp(&done);
+
+ StubRuntimeCallHelper call_helper;
+ generator.GenerateSlow(masm, call_helper);
+
+ __ bind(&done);
+ __ ret(0);
+}
+
+
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::SMIS);
NearLabel miss;
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index dfee36e..ad114c2 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -2747,7 +2747,8 @@
frame_->EmitPush(rsi); // The context is the first argument.
frame_->EmitPush(kScratchRegister);
frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0));
- Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
+ frame_->EmitPush(Smi::FromInt(strict_mode_flag()));
+ Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
@@ -4605,7 +4606,8 @@
// by initialization.
value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
} else {
- value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
+ frame_->Push(Smi::FromInt(strict_mode_flag()));
+ value = frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
}
// Storing a variable must keep the (new) value on the expression
// stack. This is necessary for compiling chained assignment
@@ -4694,18 +4696,7 @@
void CodeGenerator::VisitLiteral(Literal* node) {
Comment cmnt(masm_, "[ Literal");
- if (frame_->ConstantPoolOverflowed()) {
- Result temp = allocator_->Allocate();
- ASSERT(temp.is_valid());
- if (node->handle()->IsSmi()) {
- __ Move(temp.reg(), Smi::cast(*node->handle()));
- } else {
- __ movq(temp.reg(), node->handle(), RelocInfo::EMBEDDED_OBJECT);
- }
- frame_->Push(&temp);
- } else {
- frame_->Push(node->handle());
- }
+ frame_->Push(node->handle());
}
@@ -4925,8 +4916,9 @@
Load(property->key());
Load(property->value());
if (property->emit_store()) {
+ frame_->Push(Smi::FromInt(NONE)); // PropertyAttributes
// Ignore the result.
- Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
+ Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 4);
} else {
frame_->Drop(3);
}
@@ -8086,8 +8078,12 @@
public:
DeferredReferenceSetKeyedValue(Register value,
Register key,
- Register receiver)
- : value_(value), key_(key), receiver_(receiver) {
+ Register receiver,
+ StrictModeFlag strict_mode)
+ : value_(value),
+ key_(key),
+ receiver_(receiver),
+ strict_mode_(strict_mode) {
set_comment("[ DeferredReferenceSetKeyedValue");
}
@@ -8100,6 +8096,7 @@
Register key_;
Register receiver_;
Label patch_site_;
+ StrictModeFlag strict_mode_;
};
@@ -8151,7 +8148,9 @@
}
// Call the IC stub.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
// The delta from the start of the map-compare instructions (initial movq)
// to the test instruction. We use masm_-> directly here instead of the
@@ -8492,7 +8491,8 @@
DeferredReferenceSetKeyedValue* deferred =
new DeferredReferenceSetKeyedValue(result.reg(),
key.reg(),
- receiver.reg());
+ receiver.reg(),
+ strict_mode_flag());
// Check that the receiver is not a smi.
__ JumpIfSmi(receiver.reg(), deferred->entry_label());
@@ -8554,7 +8554,7 @@
deferred->BindExit();
} else {
- result = frame()->CallKeyedStoreIC();
+ result = frame()->CallKeyedStoreIC(strict_mode_flag());
// Make sure that we do not have a test instruction after the
// call. A test instruction after the call is used to
// indicate that we have generated an inline version of the
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index b8d7e50..0ad6ec2 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -327,13 +327,6 @@
}
-FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
- Token::Value op, Expression* left, Expression* right) {
- ASSERT(ShouldInlineSmiCase(op));
- return kNoConstants;
-}
-
-
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
@@ -742,7 +735,9 @@
prop->key()->AsLiteral()->handle()->IsSmi());
__ Move(rcx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(is_strict()
+ ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
}
}
@@ -759,7 +754,8 @@
__ push(rsi); // The context is the first argument.
__ Push(pairs);
__ Push(Smi::FromInt(is_eval() ? 1 : 0));
- __ CallRuntime(Runtime::kDeclareGlobals, 3);
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kDeclareGlobals, 4);
// Return value is ignored.
}
@@ -1403,7 +1399,8 @@
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
- __ CallRuntime(Runtime::kSetProperty, 3);
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ CallRuntime(Runtime::kSetProperty, 4);
} else {
__ Drop(3);
}
@@ -1579,14 +1576,8 @@
}
Token::Value op = expr->binary_op();
- ConstantOperand constant = ShouldInlineSmiCase(op)
- ? GetConstantOperand(op, expr->target(), expr->value())
- : kNoConstants;
- ASSERT(constant == kRightConstant || constant == kNoConstants);
- if (constant == kNoConstants) {
- __ push(rax); // Left operand goes on the stack.
- VisitForAccumulatorValue(expr->value());
- }
+ __ push(rax); // Left operand goes on the stack.
+ VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
@@ -1598,8 +1589,7 @@
op,
mode,
expr->target(),
- expr->value(),
- constant);
+ expr->value());
} else {
EmitBinaryOp(op, mode);
}
@@ -1650,10 +1640,7 @@
Token::Value op,
OverwriteMode mode,
Expression* left,
- Expression* right,
- ConstantOperand constant) {
- ASSERT(constant == kNoConstants); // Only handled case.
-
+ Expression* right) {
// Do combined smi check of the operands. Left operand is on the
// stack (popped into rdx). Right operand is in rax but moved into
// rcx to make the shifts easier.
@@ -1750,7 +1737,9 @@
__ movq(rdx, rax);
__ pop(rax); // Restore value.
__ Move(rcx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
@@ -1771,7 +1760,9 @@
__ pop(rdx);
}
__ pop(rax); // Restore value.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
break;
}
@@ -1866,7 +1857,8 @@
__ push(rax); // Value.
__ push(rsi); // Context.
__ Push(var->name());
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
}
}
@@ -1897,7 +1889,9 @@
} else {
__ pop(rdx);
}
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
@@ -1935,7 +1929,9 @@
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
// If the assignment ends an initialization block, revert to fast case.
@@ -2051,6 +2047,27 @@
}
+void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
+ int arg_count) {
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ push(Operand(rsp, arg_count * kPointerSize));
+ } else {
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
+ }
+
+ // Push the receiver of the enclosing function and do runtime call.
+ __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
+
+ // Push the strict mode flag.
+ __ Push(Smi::FromInt(strict_mode_flag()));
+
+ __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
+ ? Runtime::kResolvePossiblyDirectEvalNoLookup
+ : Runtime::kResolvePossiblyDirectEval, 4);
+}
+
+
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
// We want to verify that RecordJSReturnSite gets called on all paths
@@ -2078,21 +2095,30 @@
VisitForStackValue(args->at(i));
}
- // Push copy of the function - found below the arguments.
- __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
-
- // Push copy of the first argument or undefined if it doesn't exist.
- if (arg_count > 0) {
- __ push(Operand(rsp, arg_count * kPointerSize));
- } else {
- __ PushRoot(Heap::kUndefinedValueRootIndex);
+ // If we know that eval can only be shadowed by eval-introduced
+ // variables we attempt to load the global eval function directly
+ // in generated code. If we succeed, there is no need to perform a
+ // context lookup in the runtime system.
+ Label done;
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Label slow;
+ EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow);
+ // Push the function and resolve eval.
+ __ push(rax);
+ EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
+ __ jmp(&done);
+ __ bind(&slow);
}
- // Push the receiver of the enclosing function and do runtime call.
- __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
- // Push the strict mode flag.
- __ Push(Smi::FromInt(strict_mode_flag()));
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
+ // Push copy of the function (found below the arguments) and
+ // resolve eval.
+ __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
+ EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
+ if (done.is_linked()) {
+ __ bind(&done);
+ }
// The runtime call returns a pair of values in rax (function) and
// rdx (receiver). Touch up the stack with the right values.
@@ -2709,7 +2735,8 @@
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- __ CallRuntime(Runtime::kMath_pow, 2);
+ MathPowStub stub;
+ __ CallStub(&stub);
context()->Plug(rax);
}
@@ -2968,7 +2995,73 @@
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
+ Label done;
+ Label slow_case;
+ Register object = rax;
+ Register index_1 = rbx;
+ Register index_2 = rcx;
+ Register elements = rdi;
+ Register temp = rdx;
+ __ movq(object, Operand(rsp, 2 * kPointerSize));
+ // Fetch the map and check if array is in fast case.
+ // Check that object doesn't require security checks and
+ // has no indexed interceptor.
+ __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
+ __ j(below, &slow_case);
+ __ testb(FieldOperand(temp, Map::kBitFieldOffset),
+ Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
+ __ j(not_zero, &slow_case);
+
+ // Check the object's elements are in fast case and writable.
+ __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
+ __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
+ Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, &slow_case);
+
+ // Check that both indices are smis.
+ __ movq(index_1, Operand(rsp, 1 * kPointerSize));
+ __ movq(index_2, Operand(rsp, 0 * kPointerSize));
+ __ JumpIfNotBothSmi(index_1, index_2, &slow_case);
+
+ // Check that both indices are valid.
+ // The JSArray length field is a smi since the array is in fast case mode.
+ __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
+ __ SmiCompare(temp, index_1);
+ __ j(below_equal, &slow_case);
+ __ SmiCompare(temp, index_2);
+ __ j(below_equal, &slow_case);
+
+ __ SmiToInteger32(index_1, index_1);
+ __ SmiToInteger32(index_2, index_2);
+ // Bring addresses into index1 and index2.
+ __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
+ FixedArray::kHeaderSize));
+
+ // Swap elements. Use object and temp as scratch registers.
+ __ movq(object, Operand(index_1, 0));
+ __ movq(temp, Operand(index_2, 0));
+ __ movq(Operand(index_2, 0), object);
+ __ movq(Operand(index_1, 0), temp);
+
+ Label new_space;
+ __ InNewSpace(elements, temp, equal, &new_space);
+
+ __ movq(object, elements);
+ __ RecordWriteHelper(object, index_1, temp);
+ __ RecordWriteHelper(elements, index_2, temp);
+
+ __ bind(&new_space);
+ // We are done. Drop elements from the stack, and return undefined.
+ __ addq(rsp, Immediate(3 * kPointerSize));
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
+
+ __ bind(&done);
context()->Plug(rax);
}
@@ -3449,7 +3542,9 @@
case NAMED_PROPERTY: {
__ Move(rcx, prop->key()->AsLiteral()->handle());
__ pop(rdx);
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
@@ -3464,7 +3559,9 @@
case KEYED_PROPERTY: {
__ pop(rcx);
__ pop(rdx);
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
EmitCallIC(ic, RelocInfo::CODE_TARGET);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 55d837c..b3243cf 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -766,7 +766,8 @@
}
-void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : key
@@ -813,7 +814,7 @@
__ bind(&slow);
__ Integer32ToSmi(rcx, rcx);
__ bind(&slow_with_tagged_index);
- GenerateRuntimeSetProperty(masm);
+ GenerateRuntimeSetProperty(masm, strict_mode);
// Never returns to here.
// Check whether the elements is a pixel array.
@@ -1474,7 +1475,7 @@
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
- Code::ExtraICState extra_ic_state) {
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : name
@@ -1486,7 +1487,7 @@
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
NOT_IN_LOOP,
MONOMORPHIC,
- extra_ic_state);
+ strict_mode);
StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);
// Cache miss: Jump to runtime.
@@ -1593,7 +1594,8 @@
}
-void StoreIC::GenerateGlobalProxy(MacroAssembler* masm) {
+void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : name
@@ -1604,14 +1606,17 @@
__ push(rdx);
__ push(rcx);
__ push(rax);
- __ push(rbx);
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ Push(Smi::FromInt(strict_mode));
+ __ push(rbx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
-void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
+ StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : key
@@ -1623,10 +1628,12 @@
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ Push(Smi::FromInt(strict_mode)); // Strict mode.
__ push(rbx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 90244f1..0ae8a00 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -77,6 +77,7 @@
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
+ GenerateJumpTable() &&
GenerateSafepointTable();
}
@@ -240,6 +241,16 @@
}
+bool LCodeGen::GenerateJumpTable() {
+ for (int i = 0; i < jump_table_.length(); i++) {
+ JumpTableEntry* info = jump_table_[i];
+ __ bind(&(info->label_));
+ __ Jump(info->address_, RelocInfo::RUNTIME_ENTRY);
+ }
+ return !is_aborted();
+}
+
+
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
@@ -512,10 +523,17 @@
if (cc == no_condition) {
__ Jump(entry, RelocInfo::RUNTIME_ENTRY);
} else {
- NearLabel done;
- __ j(NegateCondition(cc), &done);
- __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
- __ bind(&done);
+ JumpTableEntry* jump_info = NULL;
+ // We often have several deopts to the same entry, reuse the last
+ // jump entry if this is the case.
+ if (jump_table_.length() > 0 &&
+ jump_table_[jump_table_.length() - 1]->address_ == entry) {
+ jump_info = jump_table_[jump_table_.length() - 1];
+ } else {
+ jump_info = new JumpTableEntry(entry);
+ jump_table_.Add(jump_info);
+ }
+ __ j(cc, &jump_info->label_);
}
}
@@ -527,7 +545,8 @@
Handle<DeoptimizationInputData> data =
Factory::NewDeoptimizationInputData(length, TENURED);
- data->SetTranslationByteArray(*translations_.CreateByteArray());
+ Handle<ByteArray> translations = translations_.CreateByteArray();
+ data->SetTranslationByteArray(*translations);
data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
Handle<FixedArray> literals =
@@ -686,13 +705,13 @@
break;
}
case CodeStub::StringCharAt: {
- // TODO(1116): Add StringCharAt stub to x64.
- Abort("Unimplemented: %s", "StringCharAt Stub");
+ StringCharAtStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::MathPow: {
- // TODO(1115): Add MathPow stub to x64.
- Abort("Unimplemented: %s", "MathPow Stub");
+ MathPowStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::NumberToString: {
@@ -1613,7 +1632,17 @@
void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
- Abort("Unimplemented: %s", "DoHasCachedArrayIndex");
+ Register input = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+
+ ASSERT(instr->hydrogen()->value()->representation().IsTagged());
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
+ __ testl(FieldOperand(input, String::kHashFieldOffset),
+ Immediate(String::kContainsCachedArrayIndexMask));
+ NearLabel done;
+ __ j(not_zero, &done);
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
+ __ bind(&done);
}
@@ -1809,9 +1838,7 @@
__ push(ToRegister(instr->InputAt(0)));
__ Push(instr->function());
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
- RecordSafepointWithRegisters(
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ movq(kScratchRegister, rax);
__ PopSafepointRegisters();
__ testq(kScratchRegister, kScratchRegister);
@@ -2462,7 +2489,54 @@
void LCodeGen::DoPower(LPower* instr) {
- Abort("Unimplemented: %s", "DoPower");
+ LOperand* left = instr->InputAt(0);
+ XMMRegister left_reg = ToDoubleRegister(left);
+ ASSERT(!left_reg.is(xmm1));
+ LOperand* right = instr->InputAt(1);
+ XMMRegister result_reg = ToDoubleRegister(instr->result());
+ Representation exponent_type = instr->hydrogen()->right()->representation();
+ if (exponent_type.IsDouble()) {
+ __ PrepareCallCFunction(2);
+ // Move arguments to correct registers
+ __ movsd(xmm0, left_reg);
+ ASSERT(ToDoubleRegister(right).is(xmm1));
+ __ CallCFunction(ExternalReference::power_double_double_function(), 2);
+ } else if (exponent_type.IsInteger32()) {
+ __ PrepareCallCFunction(2);
+ // Move arguments to correct registers: xmm0 and edi (not rdi).
+ // On Windows, the registers are xmm0 and edx.
+ __ movsd(xmm0, left_reg);
+#ifdef _WIN64
+ ASSERT(ToRegister(right).is(rdx));
+#else
+ ASSERT(ToRegister(right).is(rdi));
+#endif
+ __ CallCFunction(ExternalReference::power_double_int_function(), 2);
+ } else {
+ ASSERT(exponent_type.IsTagged());
+ CpuFeatures::Scope scope(SSE2);
+ Register right_reg = ToRegister(right);
+
+ Label non_smi, call;
+ __ JumpIfNotSmi(right_reg, &non_smi);
+ __ SmiToInteger32(right_reg, right_reg);
+ __ cvtlsi2sd(xmm1, right_reg);
+ __ jmp(&call);
+
+ __ bind(&non_smi);
+ __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , kScratchRegister);
+ DeoptimizeIf(not_equal, instr->environment());
+ __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset));
+
+ __ bind(&call);
+ __ PrepareCallCFunction(2);
+ // Move arguments to correct registers xmm0 and xmm1.
+ __ movsd(xmm0, left_reg);
+ // Right argument is already in xmm1.
+ __ CallCFunction(ExternalReference::power_double_double_function(), 2);
+ }
+ // Return value is in xmm0.
+ __ movsd(result_reg, xmm0);
}
@@ -2623,7 +2697,9 @@
ASSERT(ToRegister(instr->value()).is(rax));
__ Move(rcx, instr->hydrogen()->name());
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
@@ -2692,7 +2768,9 @@
ASSERT(ToRegister(instr->key()).is(rcx));
ASSERT(ToRegister(instr->value()).is(rax));
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index 1cac4e9..420556a 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -53,6 +53,7 @@
current_instruction_(-1),
instructions_(chunk->instructions()),
deoptimizations_(4),
+ jump_table_(4),
deoptimization_literals_(8),
inlined_function_count_(0),
scope_(chunk->graph()->info()->scope()),
@@ -147,6 +148,7 @@
bool GeneratePrologue();
bool GenerateBody();
bool GenerateDeferredCode();
+ bool GenerateJumpTable();
bool GenerateSafepointTable();
void CallCode(Handle<Code> code,
@@ -234,6 +236,14 @@
// Emits code for pushing a constant operand.
void EmitPushConstantOperand(LOperand* operand);
+ struct JumpTableEntry {
+ inline JumpTableEntry(Address address)
+ : label_(),
+ address_(address) { }
+ Label label_;
+ Address address_;
+ };
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
@@ -242,6 +252,7 @@
int current_instruction_;
const ZoneList<LInstruction*>* instructions_;
ZoneList<LEnvironment*> deoptimizations_;
+ ZoneList<JumpTableEntry*> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 8db1ba9..18b38e2 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -1158,9 +1158,8 @@
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseRegisterAtStart(instr->value()));
- MarkAsSaveDoubles(result);
- return AssignEnvironment(AssignPointerMap(DefineFixed(result, rax)));
+ new LInstanceOfKnownGlobal(UseFixed(instr->value(), rax));
+ return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1436,8 +1435,22 @@
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
- Abort("Unimplemented: %s", "DoPower");
- return NULL;
+ ASSERT(instr->representation().IsDouble());
+ // We call a C function for double power. It can't trigger a GC.
+ // We need to use fixed result register for the call.
+ Representation exponent_type = instr->right()->representation();
+ ASSERT(instr->left()->representation().IsDouble());
+ LOperand* left = UseFixedDouble(instr->left(), xmm2);
+ LOperand* right = exponent_type.IsDouble() ?
+ UseFixedDouble(instr->right(), xmm1) :
+#ifdef _WIN64
+ UseFixed(instr->right(), rdx);
+#else
+ UseFixed(instr->right(), rdi);
+#endif
+ LPower* result = new LPower(left, right);
+ return MarkAsCall(DefineFixedDouble(result, xmm1), instr,
+ CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1518,8 +1531,9 @@
LInstruction* LChunkBuilder::DoHasCachedArrayIndex(
HHasCachedArrayIndex* instr) {
- Abort("Unimplemented: %s", "DoHasCachedArrayIndex");
- return NULL;
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegister(instr->value());
+ return DefineAsRegister(new LHasCachedArrayIndex(value));
}
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 774de71..109985c 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -2408,12 +2408,13 @@
__ push(rdx); // receiver
__ push(rcx); // name
__ push(rax); // value
+ __ Push(Smi::FromInt(strict_mode_));
__ push(rbx); // restore return address
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
- __ TailCallExternalReference(store_ic_property, 3, 1);
+ __ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
@@ -3490,10 +3491,13 @@
__ push(rdx); // receiver
__ push(rcx); // key
__ push(rax); // value
+ __ Push(Smi::FromInt(NONE)); // PropertyAttributes
+ __ Push(Smi::FromInt(
+ Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
__ push(rbx); // return address
// Do tail-call to runtime routine.
- __ TailCallRuntime(Runtime::kSetProperty, 3, 1);
+ __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
return GetCode(flags);
}
diff --git a/src/x64/virtual-frame-x64.cc b/src/x64/virtual-frame-x64.cc
index ea115f2..c4d7e65 100644
--- a/src/x64/virtual-frame-x64.cc
+++ b/src/x64/virtual-frame-x64.cc
@@ -274,6 +274,24 @@
}
+void VirtualFrame::Push(Handle<Object> value) {
+ if (ConstantPoolOverflowed()) {
+ Result temp = cgen()->allocator()->Allocate();
+ ASSERT(temp.is_valid());
+ if (value->IsSmi()) {
+ __ Move(temp.reg(), Smi::cast(*value));
+ } else {
+ __ movq(temp.reg(), value, RelocInfo::EMBEDDED_OBJECT);
+ }
+ Push(&temp);
+ } else {
+ FrameElement element =
+ FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED);
+ elements_.Add(element);
+ }
+}
+
+
void VirtualFrame::Drop(int count) {
ASSERT(count >= 0);
ASSERT(height() >= count);
@@ -1124,9 +1142,9 @@
StrictModeFlag strict_mode) {
// Value and (if not contextual) receiver are on top of the frame.
// The IC expects name in rcx, value in rax, and receiver in rdx.
- Handle<Code> ic(Builtins::builtin(strict_mode == kStrictMode
- ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
+ : Builtins::StoreIC_Initialize));
Result value = Pop();
RelocInfo::Mode mode;
if (is_contextual) {
@@ -1146,7 +1164,7 @@
}
-Result VirtualFrame::CallKeyedStoreIC() {
+Result VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
// Value, key, and receiver are on the top of the frame. The IC
// expects value in rax, key in rcx, and receiver in rdx.
Result value = Pop();
@@ -1190,7 +1208,9 @@
receiver.Unuse();
}
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ Handle<Code> ic(Builtins::builtin(
+ (strict_mode == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
+ : Builtins::KeyedStoreIC_Initialize));
return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
diff --git a/src/x64/virtual-frame-x64.h b/src/x64/virtual-frame-x64.h
index 824743d..7396db1 100644
--- a/src/x64/virtual-frame-x64.h
+++ b/src/x64/virtual-frame-x64.h
@@ -343,7 +343,7 @@
// Call keyed store IC. Value, key, and receiver are found on top
// of the frame. All three are dropped.
- Result CallKeyedStoreIC();
+ Result CallKeyedStoreIC(StrictModeFlag strict_mode);
// Call call IC. Function name, arguments, and receiver are found on top
// of the frame and dropped by the call.
@@ -403,8 +403,8 @@
inline bool ConstantPoolOverflowed();
// Push an element on the virtual frame.
+ void Push(Handle<Object> value);
inline void Push(Register reg, TypeInfo info = TypeInfo::Unknown());
- inline void Push(Handle<Object> value);
inline void Push(Smi* value);
// Pushing a result invalidates it (its contents become owned by the
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index b3c52f1..f450a34 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -10091,10 +10091,11 @@
// Inject the input as a global variable.
i::Handle<i::String> input_name =
i::Factory::NewStringFromAscii(i::Vector<const char>("input", 5));
- i::Top::global_context()->global()->SetProperty(*input_name,
- *input_,
- NONE)->ToObjectChecked();
-
+ i::Top::global_context()->global()->SetProperty(
+ *input_name,
+ *input_,
+ NONE,
+ i::kNonStrictMode)->ToObjectChecked();
MorphThread morph_thread(this);
morph_thread.Start();
diff --git a/test/cctest/test-compiler.cc b/test/cctest/test-compiler.cc
index b424b7f..9f21b78 100644
--- a/test/cctest/test-compiler.cc
+++ b/test/cctest/test-compiler.cc
@@ -108,7 +108,7 @@
Handle<Object> object(value);
Handle<String> symbol = Factory::LookupAsciiSymbol(name);
Handle<JSObject> global(Top::context()->global());
- SetProperty(global, symbol, object, NONE);
+ SetProperty(global, symbol, object, NONE, kNonStrictMode);
}
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index 441aae6..7245e54 100644
--- a/test/cctest/test-debug.cc
+++ b/test/cctest/test-debug.cc
@@ -153,7 +153,8 @@
Handle<v8::internal::String> debug_string =
v8::internal::Factory::LookupAsciiSymbol("debug");
SetProperty(global, debug_string,
- Handle<Object>(Debug::debug_context()->global_proxy()), DONT_ENUM);
+ Handle<Object>(Debug::debug_context()->global_proxy()), DONT_ENUM,
+ ::v8::internal::kNonStrictMode);
}
private:
v8::Persistent<v8::Context> context_;
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index a23ee17..9cce01e 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -212,13 +212,14 @@
Handle<Map> initial_map =
Factory::NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
function->set_initial_map(*initial_map);
- Top::context()->global()->SetProperty(*name,
- *function,
- NONE)->ToObjectChecked();
+ Top::context()->global()->SetProperty(
+ *name, *function, NONE, kNonStrictMode)->ToObjectChecked();
// Allocate an object. Unrooted after leaving the scope.
Handle<JSObject> obj = Factory::NewJSObject(function);
- obj->SetProperty(*prop_name, Smi::FromInt(23), NONE)->ToObjectChecked();
- obj->SetProperty(*prop_namex, Smi::FromInt(24), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
+ obj->SetProperty(
+ *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
@@ -238,10 +239,10 @@
HandleScope inner_scope;
// Allocate another object, make it reachable from global.
Handle<JSObject> obj = Factory::NewJSObject(function);
- Top::context()->global()->SetProperty(*obj_name,
- *obj,
- NONE)->ToObjectChecked();
- obj->SetProperty(*prop_name, Smi::FromInt(23), NONE)->ToObjectChecked();
+ Top::context()->global()->SetProperty(
+ *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
+ obj->SetProperty(
+ *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
}
// After gc, it should survive.
@@ -540,12 +541,12 @@
Handle<String> prop_name = Factory::LookupAsciiSymbol("theSlot");
Handle<JSObject> obj = Factory::NewJSObject(function);
- obj->SetProperty(*prop_name, Smi::FromInt(23), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
// Check that we can add properties to function objects.
- function->SetProperty(*prop_name,
- Smi::FromInt(24),
- NONE)->ToObjectChecked();
+ function->SetProperty(
+ *prop_name, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
}
@@ -567,7 +568,8 @@
CHECK(!obj->HasLocalProperty(*first));
// add first
- obj->SetProperty(*first, Smi::FromInt(1), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
CHECK(obj->HasLocalProperty(*first));
// delete first
@@ -575,8 +577,10 @@
CHECK(!obj->HasLocalProperty(*first));
// add first and then second
- obj->SetProperty(*first, Smi::FromInt(1), NONE)->ToObjectChecked();
- obj->SetProperty(*second, Smi::FromInt(2), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
+ obj->SetProperty(
+ *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
CHECK(obj->HasLocalProperty(*first));
CHECK(obj->HasLocalProperty(*second));
@@ -588,8 +592,10 @@
CHECK(!obj->HasLocalProperty(*second));
// add first and then second
- obj->SetProperty(*first, Smi::FromInt(1), NONE)->ToObjectChecked();
- obj->SetProperty(*second, Smi::FromInt(2), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
+ obj->SetProperty(
+ *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
CHECK(obj->HasLocalProperty(*first));
CHECK(obj->HasLocalProperty(*second));
@@ -603,14 +609,16 @@
// check string and symbol match
static const char* string1 = "fisk";
Handle<String> s1 = Factory::NewStringFromAscii(CStrVector(string1));
- obj->SetProperty(*s1, Smi::FromInt(1), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
Handle<String> s1_symbol = Factory::LookupAsciiSymbol(string1);
CHECK(obj->HasLocalProperty(*s1_symbol));
// check symbol and string match
static const char* string2 = "fugl";
Handle<String> s2_symbol = Factory::LookupAsciiSymbol(string2);
- obj->SetProperty(*s2_symbol, Smi::FromInt(1), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *s2_symbol, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
Handle<String> s2 = Factory::NewStringFromAscii(CStrVector(string2));
CHECK(obj->HasLocalProperty(*s2));
}
@@ -631,7 +639,8 @@
Handle<JSObject> obj = Factory::NewJSObject(function);
// Set a propery
- obj->SetProperty(*prop_name, Smi::FromInt(23), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
// Check the map has changed
@@ -698,8 +707,10 @@
Handle<String> first = Factory::LookupAsciiSymbol("first");
Handle<String> second = Factory::LookupAsciiSymbol("second");
- obj->SetProperty(*first, Smi::FromInt(1), NONE)->ToObjectChecked();
- obj->SetProperty(*second, Smi::FromInt(2), NONE)->ToObjectChecked();
+ obj->SetProperty(
+ *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
+ obj->SetProperty(
+ *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
Object* ok = obj->SetElement(0, *first)->ToObjectChecked();
@@ -716,8 +727,10 @@
CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));
// Flip the values.
- clone->SetProperty(*first, Smi::FromInt(2), NONE)->ToObjectChecked();
- clone->SetProperty(*second, Smi::FromInt(1), NONE)->ToObjectChecked();
+ clone->SetProperty(
+ *first, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
+ clone->SetProperty(
+ *second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
ok = clone->SetElement(0, *second)->ToObjectChecked();
ok = clone->SetElement(1, *first)->ToObjectChecked();
diff --git a/test/cctest/test-mark-compact.cc b/test/cctest/test-mark-compact.cc
index 86f105f..3e3175e 100644
--- a/test/cctest/test-mark-compact.cc
+++ b/test/cctest/test-mark-compact.cc
@@ -189,7 +189,8 @@
function->set_initial_map(initial_map);
Top::context()->global()->SetProperty(func_name,
function,
- NONE)->ToObjectChecked();
+ NONE,
+ kNonStrictMode)->ToObjectChecked();
JSObject* obj =
JSObject::cast(Heap::AllocateJSObject(function)->ToObjectChecked());
@@ -208,10 +209,14 @@
String::cast(Heap::LookupAsciiSymbol("theObject")->ToObjectChecked());
Top::context()->global()->SetProperty(obj_name,
obj,
- NONE)->ToObjectChecked();
+ NONE,
+ kNonStrictMode)->ToObjectChecked();
String* prop_name =
String::cast(Heap::LookupAsciiSymbol("theSlot")->ToObjectChecked());
- obj->SetProperty(prop_name, Smi::FromInt(23), NONE)->ToObjectChecked();
+ obj->SetProperty(prop_name,
+ Smi::FromInt(23),
+ NONE,
+ kNonStrictMode)->ToObjectChecked();
Heap::CollectGarbage(OLD_POINTER_SPACE);
diff --git a/test/es5conform/es5conform.status b/test/es5conform/es5conform.status
index e021fc5..d6f7caf 100644
--- a/test/es5conform/es5conform.status
+++ b/test/es5conform/es5conform.status
@@ -269,72 +269,6 @@
# in strict mode (Global.length)
chapter11/11.13/11.13.1/11.13.1-4-4-s: FAIL
# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Object.length)
-chapter11/11.13/11.13.1/11.13.1-4-5-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Function.length)
-chapter11/11.13/11.13.1/11.13.1-4-6-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Array.length)
-chapter11/11.13/11.13.1/11.13.1-4-7-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (String.length)
-chapter11/11.13/11.13.1/11.13.1-4-8-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Boolean.length)
-chapter11/11.13/11.13.1/11.13.1-4-9-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Number.length)
-chapter11/11.13/11.13.1/11.13.1-4-10-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Date.length)
-chapter11/11.13/11.13.1/11.13.1-4-11-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (RegExp.length)
-chapter11/11.13/11.13.1/11.13.1-4-12-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Error.length)
-chapter11/11.13/11.13.1/11.13.1-4-13-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Number.MAX_VALUE)
-chapter11/11.13/11.13.1/11.13.1-4-14-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Number.MIN_VALUE)
-chapter11/11.13/11.13.1/11.13.1-4-15-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Number.NaN)
-chapter11/11.13/11.13.1/11.13.1-4-16-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Number.NEGATIVE_INFINITY)
-chapter11/11.13/11.13.1/11.13.1-4-17-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Number.POSITIVE_INFINITY)
-chapter11/11.13/11.13.1/11.13.1-4-18-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.E)
-chapter11/11.13/11.13.1/11.13.1-4-19-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.LN10)
-chapter11/11.13/11.13.1/11.13.1-4-20-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.LN2)
-chapter11/11.13/11.13.1/11.13.1-4-21-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.LOG2E)
-chapter11/11.13/11.13.1/11.13.1-4-22-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.LOG10E)
-chapter11/11.13/11.13.1/11.13.1-4-23-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.PI)
-chapter11/11.13/11.13.1/11.13.1-4-24-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.SQRT1_2)
-chapter11/11.13/11.13.1/11.13.1-4-25-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
-# in strict mode (Math.SQRT2)
-chapter11/11.13/11.13.1/11.13.1-4-26-s: FAIL
-# simple assignment throws TypeError if LeftHandSide is a readonly property
# in strict mode (Global.undefined)
chapter11/11.13/11.13.1/11.13.1-4-27-s: FAIL
diff --git a/test/mjsunit/array-join.js b/test/mjsunit/array-join.js
index c66e462..ddd1496 100644
--- a/test/mjsunit/array-join.js
+++ b/test/mjsunit/array-join.js
@@ -27,19 +27,44 @@
// Test that array join calls toString on subarrays.
var a = [[1,2],3,4,[5,6]];
+assertEquals('1,2345,6', a.join(''));
assertEquals('1,2*3*4*5,6', a.join('*'));
+assertEquals('1,2**3**4**5,6', a.join('**'));
+assertEquals('1,2****3****4****5,6', a.join('****'));
+assertEquals('1,2********3********4********5,6', a.join('********'));
+assertEquals('1,2**********3**********4**********5,6', a.join('**********'));
// Create a cycle.
a.push(a);
+assertEquals('1,2345,6', a.join(''));
assertEquals('1,2*3*4*5,6*', a.join('*'));
+assertEquals('1,2**3**4**5,6**', a.join('**'));
+assertEquals('1,2****3****4****5,6****', a.join('****'));
+assertEquals('1,2********3********4********5,6********', a.join('********'));
+assertEquals('1,2**********3**********4**********5,6**********', a.join('**********'));
// Replace array.prototype.toString.
Array.prototype.toString = function() { return "array"; }
+assertEquals('array34arrayarray', a.join(''));
assertEquals('array*3*4*array*array', a.join('*'));
+assertEquals('array**3**4**array**array', a.join('**'));
+assertEquals('array****3****4****array****array', a.join('****'));
+assertEquals('array********3********4********array********array', a.join('********'));
+assertEquals('array**********3**********4**********array**********array', a.join('**********'));
Array.prototype.toString = function() { throw 42; }
+assertThrows("a.join('')");
assertThrows("a.join('*')");
+assertThrows("a.join('**')");
+assertThrows("a.join('****')");
+assertThrows("a.join('********')");
+assertThrows("a.join('**********')");
Array.prototype.toString = function() { return "array"; }
+assertEquals('array34arrayarray', a.join(''));
assertEquals('array*3*4*array*array', a.join('*'));
+assertEquals('array**3**4**array**array', a.join('**'));
+assertEquals('array****3****4****array****array', a.join('****'));
+assertEquals('array********3********4********array********array', a.join('********'));
+assertEquals('array**********3**********4**********array**********array', a.join('**********'));
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index c10281f..8f042ce 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -111,18 +111,6 @@
regress/regress-1132: SKIP
##############################################################################
-[ $arch == arm && $crankshaft ]
-
-# BUG (1094)
-regress/regress-deopt-gc: SKIP
-
-##############################################################################
-[ $arch == x64 && $crankshaft ]
-
-# BUG (1094)
-regress/regress-deopt-gc: SKIP
-
-##############################################################################
[ $arch == mips ]
# Skip all tests on MIPS.
diff --git a/test/mjsunit/override-eval-with-non-function.js b/test/mjsunit/override-eval-with-non-function.js
new file mode 100644
index 0000000..aa93b25
--- /dev/null
+++ b/test/mjsunit/override-eval-with-non-function.js
@@ -0,0 +1,36 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// When 'eval' is overridden with a non-function object we should
+// check whether the object is callable.
+
+function test() {
+ eval = /foo/;
+ assertEquals("foo", eval("foobar"));
+}
+
+test();
diff --git a/test/mjsunit/regress/regress-1207.js b/test/mjsunit/regress/regress-1207.js
new file mode 100644
index 0000000..102178a
--- /dev/null
+++ b/test/mjsunit/regress/regress-1207.js
@@ -0,0 +1,35 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test throwing an exception from instanceof.
+try {
+var object = { };
+function fib(n) {
+ var f0 = (object instanceof encodeURI)('#2: var x = 1; x <= 1 === true'), f1 = 1;
+}
+fib(75);
+} catch (o) { }
diff --git a/test/mjsunit/regress/regress-1209.js b/test/mjsunit/regress/regress-1209.js
new file mode 100644
index 0000000..c017fb5
--- /dev/null
+++ b/test/mjsunit/regress/regress-1209.js
@@ -0,0 +1,34 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function crashMe(n) {
+ var nasty = [];
+ while (n--)
+ nasty.push("a" + 0);
+ return Function.apply(null, nasty);
+}
+crashMe(64 + 1).length;
diff --git a/test/mjsunit/regress/regress-1210.js b/test/mjsunit/regress/regress-1210.js
new file mode 100644
index 0000000..9c708a5
--- /dev/null
+++ b/test/mjsunit/regress/regress-1210.js
@@ -0,0 +1,48 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Deoptimization of the key expression in an arguments access should see
+// the arguments object as the value of the receiver.
+
+var a = 0;
+
+function observe(x, y) { return x; }
+
+function side_effect(x) { a = x; }
+
+function test() {
+ // We will trigger deoptimization of 'a + 0' which should bail out to
+ // immediately after the call to 'side_effect' (i.e., still in the key
+ // subexpression of the arguments access).
+ return observe(a, arguments[side_effect(a), a + 0]);
+}
+
+// Run enough to optimize assuming global 'a' is a smi.
+for (var i = 0; i < 1000000; ++i) test(0);
+
+a = "hello";
+test(0);
diff --git a/test/mjsunit/regress/regress-1213.js b/test/mjsunit/regress/regress-1213.js
new file mode 100644
index 0000000..d66e3ce
--- /dev/null
+++ b/test/mjsunit/regress/regress-1213.js
@@ -0,0 +1,43 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we do not allow overwriting a global property with a
+// redeclaration that makes the property configurable (and hence
+// deletable).
+
+var x = 0;
+
+function TestGlobal() {
+ for (var i = 0; i < 2; i++) {
+ x = x + 1;
+ }
+ this.eval('function x() {};');
+ delete this['x'];
+}
+
+TestGlobal();
+TestGlobal();
diff --git a/test/mjsunit/regress/regress-1218.js b/test/mjsunit/regress/regress-1218.js
new file mode 100644
index 0000000..dd036ed
--- /dev/null
+++ b/test/mjsunit/regress/regress-1218.js
@@ -0,0 +1,29 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Builtin functions should not have prototype objects.
+assertFalse(Error.prototype.toString.hasOwnProperty("prototype"));
diff --git a/test/mjsunit/strict-mode.js b/test/mjsunit/strict-mode.js
index ab3e535..4071232 100644
--- a/test/mjsunit/strict-mode.js
+++ b/test/mjsunit/strict-mode.js
@@ -280,6 +280,19 @@
CheckStrictMode("function strict() { var x = --eval; }", SyntaxError);
CheckStrictMode("function strict() { var x = --arguments; }", SyntaxError);
+// Use of const in strict mode is disallowed in anticipation of ES Harmony.
+CheckStrictMode("const x = 0;", SyntaxError);
+CheckStrictMode("for (const x = 0; false;) {}", SyntaxError);
+CheckStrictMode("function strict() { const x = 0; }", SyntaxError);
+
+// Strict mode only allows functions in SourceElements
+CheckStrictMode("if (true) { function invalid() {} }", SyntaxError);
+CheckStrictMode("for (;false;) { function invalid() {} }", SyntaxError);
+CheckStrictMode("{ function invalid() {} }", SyntaxError);
+CheckStrictMode("try { function invalid() {} } catch(e) {}", SyntaxError);
+CheckStrictMode("try { } catch(e) { function invalid() {} }", SyntaxError);
+CheckStrictMode("function outer() {{ function invalid() {} }}", SyntaxError);
+
// Delete of an unqualified identifier
CheckStrictMode("delete unqualified;", SyntaxError);
CheckStrictMode("function strict() { delete unqualified; }", SyntaxError);
@@ -700,3 +713,118 @@
cleanup(Boolean);
}
})();
+
+
+(function ObjectEnvironment() {
+ var o = {};
+ Object.defineProperty(o, "foo", { value: "FOO", writable: false });
+ assertThrows(
+ function () {
+ with (o) {
+ (function() {
+ "use strict";
+ foo = "Hello";
+ })();
+ }
+ },
+ TypeError);
+})();
+
+
+(function TestSetPropertyWithoutSetter() {
+ var o = { get foo() { return "Yey"; } };
+ assertThrows(
+ function broken() {
+ "use strict";
+ o.foo = (0xBADBAD00 >> 1);
+ },
+ TypeError);
+})();
+
+
+(function TestSetPropertyNonConfigurable() {
+ var frozen = Object.freeze({});
+ var sealed = Object.seal({});
+
+ function strict(o) {
+ "use strict";
+ o.property = "value";
+ }
+
+ assertThrows(function() { strict(frozen); }, TypeError);
+ assertThrows(function() { strict(sealed); }, TypeError);
+})();
+
+
+(function TestAssignmentToReadOnlyProperty() {
+ "use strict";
+
+ var o = {};
+ Object.defineProperty(o, "property", { value: 7 });
+
+ assertThrows(function() { o.property = "new value"; }, TypeError);
+ assertThrows(function() { o.property += 10; }, TypeError);
+ assertThrows(function() { o.property -= 10; }, TypeError);
+ assertThrows(function() { o.property *= 10; }, TypeError);
+ assertThrows(function() { o.property /= 10; }, TypeError);
+ assertThrows(function() { o.property++; }, TypeError);
+ assertThrows(function() { o.property--; }, TypeError);
+ assertThrows(function() { ++o.property; }, TypeError);
+ assertThrows(function() { --o.property; }, TypeError);
+
+ var name = "prop" + "erty"; // to avoid symbol path.
+ assertThrows(function() { o[name] = "new value"; }, TypeError);
+ assertThrows(function() { o[name] += 10; }, TypeError);
+ assertThrows(function() { o[name] -= 10; }, TypeError);
+ assertThrows(function() { o[name] *= 10; }, TypeError);
+ assertThrows(function() { o[name] /= 10; }, TypeError);
+ assertThrows(function() { o[name]++; }, TypeError);
+ assertThrows(function() { o[name]--; }, TypeError);
+ assertThrows(function() { ++o[name]; }, TypeError);
+ assertThrows(function() { --o[name]; }, TypeError);
+
+ assertEquals(o.property, 7);
+})();
+
+
+(function TestAssignmentToReadOnlyLoop() {
+ var name = "prop" + "erty"; // to avoid symbol path.
+ var o = {};
+ Object.defineProperty(o, "property", { value: 7 });
+
+ function strict(o, name) {
+ "use strict";
+ o[name] = "new value";
+ }
+
+ for (var i = 0; i < 10; i ++) {
+ try {
+ strict(o, name);
+ assertUnreachable();
+ } catch(e) {
+ assertInstanceof(e, TypeError);
+ }
+ }
+})();
+
+
+// Exercise a specialized KeyedStoreIC that experiences an IC miss.
+(function testKeyedStoreICStrict() {
+ var o = [9,8,7,6,5,4,3,2,1];
+
+ function test(o, i, v) {
+ "use strict";
+ o[i] = v;
+ }
+
+ for (var i = 0; i < 10; i ++) {
+ test(o, 5, 17); // start specialized for smi indices
+ assertEquals(o[5], 17);
+ test(o, "a", 19);
+ assertEquals(o["a"], 19);
+ test(o, "5", 29);
+ assertEquals(o[5], 29);
+ test(o, 100000, 31);
+ assertEquals(o[100000], 31);
+ }
+})();