Version 1.2.11.
Improved performance on IA-32 and ARM.
Fixed profiler sampler implementation on Mac OS X.
Changed the representation of global objects to improve performance of adding a lot of new properties.
git-svn-id: http://v8.googlecode.com/svn/trunk@2329 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 6ec8f46..d168577 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -491,6 +491,20 @@
}
+// We have to use the temporary register for things that can be relocated even
+// if they can be encoded in the ARM's 12 bits of immediate-offset instruction
+// space. There is no guarantee that the relocated location can be similarly
+// encoded.
+static bool MustUseIp(RelocInfo::Mode rmode) {
+ if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
+ return Serializer::enabled();
+ } else if (rmode == RelocInfo::NONE) {
+ return false;
+ }
+ return true;
+}
+
+
void Assembler::addrmod1(Instr instr,
Register rn,
Register rd,
@@ -501,8 +515,7 @@
// immediate
uint32_t rotate_imm;
uint32_t immed_8;
- if ((x.rmode_ != RelocInfo::NONE &&
- x.rmode_ != RelocInfo::EXTERNAL_REFERENCE) ||
+ if (MustUseIp(x.rmode_) ||
!fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) {
// The immediate operand cannot be encoded as a shifter operand, so load
// it first to register ip and change the original instruction to use ip.
@@ -816,7 +829,6 @@
void Assembler::mla(Register dst, Register src1, Register src2, Register srcA,
SBit s, Condition cond) {
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc) && !srcA.is(pc));
- ASSERT(!dst.is(src1));
emit(cond | A | s | dst.code()*B16 | srcA.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -825,7 +837,6 @@
void Assembler::mul(Register dst, Register src1, Register src2,
SBit s, Condition cond) {
ASSERT(!dst.is(pc) && !src1.is(pc) && !src2.is(pc));
- ASSERT(!dst.is(src1));
emit(cond | s | dst.code()*B16 | src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -837,7 +848,7 @@
SBit s,
Condition cond) {
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
- ASSERT(!dstL.is(dstH) && !dstH.is(src1) && !src1.is(dstL));
+ ASSERT(!dstL.is(dstH));
emit(cond | B23 | B22 | A | s | dstH.code()*B16 | dstL.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -850,7 +861,7 @@
SBit s,
Condition cond) {
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
- ASSERT(!dstL.is(dstH) && !dstH.is(src1) && !src1.is(dstL));
+ ASSERT(!dstL.is(dstH));
emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -863,7 +874,7 @@
SBit s,
Condition cond) {
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
- ASSERT(!dstL.is(dstH) && !dstH.is(src1) && !src1.is(dstL));
+ ASSERT(!dstL.is(dstH));
emit(cond | B23 | A | s | dstH.code()*B16 | dstL.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -876,8 +887,8 @@
SBit s,
Condition cond) {
ASSERT(!dstL.is(pc) && !dstH.is(pc) && !src1.is(pc) && !src2.is(pc));
- ASSERT(!dstL.is(dstH) && !dstH.is(src1) && !src1.is(dstL));
- emit(cond | B23 | s | dstH.code()*B16 | dstL.code()*B12 |
+ ASSERT(!dstL.is(dstH));
+ emit(cond | B23 | B22 | s | dstH.code()*B16 | dstL.code()*B12 |
src2.code()*B8 | B7 | B4 | src1.code());
}
@@ -906,8 +917,7 @@
// immediate
uint32_t rotate_imm;
uint32_t immed_8;
- if ((src.rmode_ != RelocInfo::NONE &&
- src.rmode_ != RelocInfo::EXTERNAL_REFERENCE)||
+ if (MustUseIp(src.rmode_) ||
!fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) {
// immediate operand cannot be encoded, load it first to register ip
RecordRelocInfo(src.rmode_, src.imm32_);
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 6626619..5cc4824 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -41,6 +41,18 @@
#define __ ACCESS_MASM(masm_)
+static void EmitIdenticalObjectComparison(MacroAssembler* masm,
+ Label* slow,
+ Condition cc);
+static void EmitSmiNonsmiComparison(MacroAssembler* masm,
+ Label* rhs_not_nan,
+ Label* slow,
+ bool strict);
+static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc);
+static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm);
+
+
+
// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.
@@ -1002,7 +1014,13 @@
}
-void CodeGenerator::Comparison(Condition cc, bool strict) {
+void CodeGenerator::Comparison(Condition cc,
+ Expression* left,
+ Expression* right,
+ bool strict) {
+ if (left != NULL) LoadAndSpill(left);
+ if (right != NULL) LoadAndSpill(right);
+
VirtualFrame::SpilledScope spilled_scope;
// sp[0] : y
// sp[1] : x
@@ -1026,43 +1044,19 @@
__ tst(r2, Operand(kSmiTagMask));
smi.Branch(eq);
- // Perform non-smi comparison by runtime call.
- frame_->EmitPush(r1);
+ // Perform non-smi comparison by stub.
+ // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
+ // We call with 0 args because there are 0 on the stack.
+ CompareStub stub(cc, strict);
+ frame_->CallStub(&stub, 0);
- // Figure out which native to call and setup the arguments.
- Builtins::JavaScript native;
- int arg_count = 1;
- if (cc == eq) {
- native = strict ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
- } else {
- native = Builtins::COMPARE;
- int ncr; // NaN compare result
- if (cc == lt || cc == le) {
- ncr = GREATER;
- } else {
- ASSERT(cc == gt || cc == ge); // remaining cases
- ncr = LESS;
- }
- frame_->EmitPush(r0);
- arg_count++;
- __ mov(r0, Operand(Smi::FromInt(ncr)));
- }
-
- // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
- // tagged as a small integer.
- frame_->EmitPush(r0);
- Result arg_count_register = allocator_->Allocate(r0);
- ASSERT(arg_count_register.is_valid());
- __ mov(arg_count_register.reg(), Operand(arg_count));
- Result result = frame_->InvokeBuiltin(native,
- CALL_JS,
- &arg_count_register,
- arg_count + 1);
+ Result result = allocator_->Allocate(r0);
+ ASSERT(result.is_valid());
__ cmp(result.reg(), Operand(0));
result.Unuse();
exit.Jump();
- // test smi equality by pointer comparison.
+ // Do smi comparisons by pointer comparison.
smi.Bind();
__ cmp(r1, Operand(r0));
@@ -1505,8 +1499,7 @@
// Duplicate TOS.
__ ldr(r0, frame_->Top());
frame_->EmitPush(r0);
- LoadAndSpill(clause->label());
- Comparison(eq, true);
+ Comparison(eq, NULL, clause->label(), true);
Branch(false, &next_test);
// Before entering the body from the test, remove the switch value from
@@ -3180,6 +3173,15 @@
}
+void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
+ VirtualFrame::SpilledScope spilled_scope;
+ ASSERT(args->length() == 1);
+ LoadAndSpill(args->at(0)); // Load the object.
+ frame_->CallRuntime(Runtime::kClassOf, 1);
+ frame_->EmitPush(r0);
+}
+
+
void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
VirtualFrame::SpilledScope spilled_scope;
ASSERT(args->length() == 1);
@@ -3290,6 +3292,14 @@
}
+void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
+ VirtualFrame::SpilledScope spilled_scope;
+ ASSERT(args->length() == 0);
+ frame_->CallRuntime(Runtime::kIsConstructCall, 0);
+ frame_->EmitPush(r0);
+}
+
+
void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
VirtualFrame::SpilledScope spilled_scope;
ASSERT(args->length() == 0);
@@ -3495,8 +3505,8 @@
case Token::SUB: {
bool overwrite =
- (node->AsBinaryOperation() != NULL &&
- node->AsBinaryOperation()->ResultOverwriteAllowed());
+ (node->expression()->AsBinaryOperation() != NULL &&
+ node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
UnarySubStub stub(overwrite);
frame_->CallStub(&stub, 0);
break;
@@ -3960,34 +3970,34 @@
return;
}
- LoadAndSpill(left);
- LoadAndSpill(right);
switch (op) {
case Token::EQ:
- Comparison(eq, false);
+ Comparison(eq, left, right, false);
break;
case Token::LT:
- Comparison(lt);
+ Comparison(lt, left, right);
break;
case Token::GT:
- Comparison(gt);
+ Comparison(gt, left, right);
break;
case Token::LTE:
- Comparison(le);
+ Comparison(le, left, right);
break;
case Token::GTE:
- Comparison(ge);
+ Comparison(ge, left, right);
break;
case Token::EQ_STRICT:
- Comparison(eq, true);
+ Comparison(eq, left, right, true);
break;
case Token::IN: {
+ LoadAndSpill(left);
+ LoadAndSpill(right);
Result arg_count = allocator_->Allocate(r0);
ASSERT(arg_count.is_valid());
__ mov(arg_count.reg(), Operand(1)); // not counting receiver
@@ -4000,6 +4010,8 @@
}
case Token::INSTANCEOF: {
+ LoadAndSpill(left);
+ LoadAndSpill(right);
InstanceofStub stub;
Result result = frame_->CallStub(&stub, 2);
// At this point if instanceof succeeded then r0 == 0.
@@ -4482,6 +4494,408 @@
}
+// Handle the case where the lhs and rhs are the same object.
+// Equality is almost reflexive (everything but NaN), so this is a test
+// for "identity and not NaN".
+static void EmitIdenticalObjectComparison(MacroAssembler* masm,
+ Label* slow,
+ Condition cc) {
+ Label not_identical;
+ __ cmp(r0, Operand(r1));
+  __ b(ne, &not_identical);
+
+ Register exp_mask_reg = r5;
+ __ mov(exp_mask_reg, Operand(HeapNumber::kExponentMask));
+
+ // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
+ // so we do the second best thing - test it ourselves.
+ Label heap_number, return_equal;
+ // They are both equal and they are not both Smis so both of them are not
+ // Smis. If it's not a heap number, then return equal.
+ if (cc == lt || cc == gt) {
+ __ CompareObjectType(r0, r4, r4, FIRST_JS_OBJECT_TYPE);
+ __ b(ge, slow);
+ } else {
+ __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
+ __ b(eq, &heap_number);
+ // Comparing JS objects with <=, >= is complicated.
+ if (cc != eq) {
+ __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
+ __ b(ge, slow);
+ }
+ }
+ __ bind(&return_equal);
+ if (cc == lt) {
+ __ mov(r0, Operand(GREATER)); // Things aren't less than themselves.
+ } else if (cc == gt) {
+ __ mov(r0, Operand(LESS)); // Things aren't greater than themselves.
+ } else {
+ __ mov(r0, Operand(0)); // Things are <=, >=, ==, === themselves.
+ }
+ __ mov(pc, Operand(lr)); // Return.
+
+ // For less and greater we don't have to check for NaN since the result of
+ // x < x is false regardless. For the others here is some code to check
+ // for NaN.
+ if (cc != lt && cc != gt) {
+ __ bind(&heap_number);
+ // It is a heap number, so return non-equal if it's NaN and equal if it's
+ // not NaN.
+ // The representation of NaN values has all exponent bits (52..62) set,
+ // and not all mantissa bits (0..51) clear.
+ // Read top bits of double representation (second word of value).
+ __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+ // Test that exponent bits are all set.
+ __ and_(r3, r2, Operand(exp_mask_reg));
+ __ cmp(r3, Operand(exp_mask_reg));
+ __ b(ne, &return_equal);
+
+ // Shift out flag and all exponent bits, retaining only mantissa.
+ __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord));
+ // Or with all low-bits of mantissa.
+ __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
+ __ orr(r0, r3, Operand(r2), SetCC);
+ // For equal we already have the right value in r0: Return zero (equal)
+ // if all bits in mantissa are zero (it's an Infinity) and non-zero if not
+ // (it's a NaN). For <= and >= we need to load r0 with the failing value
+ // if it's a NaN.
+ if (cc != eq) {
+ // All-zero means Infinity means equal.
+ __ mov(pc, Operand(lr), LeaveCC, eq); // Return equal
+ if (cc == le) {
+ __ mov(r0, Operand(GREATER)); // NaN <= NaN should fail.
+ } else {
+ __ mov(r0, Operand(LESS)); // NaN >= NaN should fail.
+ }
+ }
+ __ mov(pc, Operand(lr)); // Return.
+ }
+ // No fall through here.
+
+  __ bind(&not_identical);
+}
+
+
+// See comment at call site.
+static void EmitSmiNonsmiComparison(MacroAssembler* masm,
+ Label* rhs_not_nan,
+ Label* slow,
+ bool strict) {
+ Label lhs_is_smi;
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &lhs_is_smi);
+
+ // Rhs is a Smi. Check whether the non-smi is a heap number.
+ __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
+ if (strict) {
+ // If lhs was not a number and rhs was a Smi then strict equality cannot
+ // succeed. Return non-equal (r0 is already not zero)
+ __ mov(pc, Operand(lr), LeaveCC, ne); // Return.
+ } else {
+ // Smi compared non-strictly with a non-Smi non-heap-number. Call
+ // the runtime.
+ __ b(ne, slow);
+ }
+
+ // Rhs is a smi, lhs is a number.
+ __ push(lr);
+ __ mov(r7, Operand(r1));
+ ConvertToDoubleStub stub1(r3, r2, r7, r6);
+ __ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
+ // r3 and r2 are rhs as double.
+ __ ldr(r1, FieldMemOperand(r0, HeapNumber::kValueOffset + kPointerSize));
+ __ ldr(r0, FieldMemOperand(r0, HeapNumber::kValueOffset));
+ // We now have both loaded as doubles but we can skip the lhs nan check
+ // since it's a Smi.
+ __ pop(lr);
+ __ jmp(rhs_not_nan);
+
+ __ bind(&lhs_is_smi);
+ // Lhs is a Smi. Check whether the non-smi is a heap number.
+ __ CompareObjectType(r1, r4, r4, HEAP_NUMBER_TYPE);
+ if (strict) {
+ // If lhs was not a number and rhs was a Smi then strict equality cannot
+ // succeed. Return non-equal.
+ __ mov(r0, Operand(1), LeaveCC, ne); // Non-zero indicates not equal.
+ __ mov(pc, Operand(lr), LeaveCC, ne); // Return.
+ } else {
+ // Smi compared non-strictly with a non-Smi non-heap-number. Call
+ // the runtime.
+ __ b(ne, slow);
+ }
+
+ // Lhs is a smi, rhs is a number.
+ // r0 is Smi and r1 is heap number.
+ __ push(lr);
+ __ ldr(r2, FieldMemOperand(r1, HeapNumber::kValueOffset));
+ __ ldr(r3, FieldMemOperand(r1, HeapNumber::kValueOffset + kPointerSize));
+ __ mov(r7, Operand(r0));
+ ConvertToDoubleStub stub2(r1, r0, r7, r6);
+ __ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
+ __ pop(lr);
+ // Fall through to both_loaded_as_doubles.
+}
+
+
+void EmitNanCheck(MacroAssembler* masm, Label* rhs_not_nan, Condition cc) {
+ bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
+ Register lhs_exponent = exp_first ? r0 : r1;
+ Register rhs_exponent = exp_first ? r2 : r3;
+ Register lhs_mantissa = exp_first ? r1 : r0;
+ Register rhs_mantissa = exp_first ? r3 : r2;
+ Label one_is_nan, neither_is_nan;
+
+ Register exp_mask_reg = r5;
+
+ __ mov(exp_mask_reg, Operand(HeapNumber::kExponentMask));
+ __ and_(r4, rhs_exponent, Operand(exp_mask_reg));
+ __ cmp(r4, Operand(exp_mask_reg));
+ __ b(ne, rhs_not_nan);
+ __ mov(r4,
+ Operand(rhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
+ SetCC);
+ __ b(ne, &one_is_nan);
+ __ cmp(rhs_mantissa, Operand(0));
+ __ b(ne, &one_is_nan);
+
+ __ bind(rhs_not_nan);
+ __ mov(exp_mask_reg, Operand(HeapNumber::kExponentMask));
+ __ and_(r4, lhs_exponent, Operand(exp_mask_reg));
+ __ cmp(r4, Operand(exp_mask_reg));
+ __ b(ne, &neither_is_nan);
+ __ mov(r4,
+ Operand(lhs_exponent, LSL, HeapNumber::kNonMantissaBitsInTopWord),
+ SetCC);
+ __ b(ne, &one_is_nan);
+ __ cmp(lhs_mantissa, Operand(0));
+ __ b(eq, &neither_is_nan);
+
+ __ bind(&one_is_nan);
+ // NaN comparisons always fail.
+ // Load whatever we need in r0 to make the comparison fail.
+ if (cc == lt || cc == le) {
+ __ mov(r0, Operand(GREATER));
+ } else {
+ __ mov(r0, Operand(LESS));
+ }
+ __ mov(pc, Operand(lr)); // Return.
+
+ __ bind(&neither_is_nan);
+}
+
+
+// See comment at call site.
+static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
+ bool exp_first = (HeapNumber::kExponentOffset == HeapNumber::kValueOffset);
+ Register lhs_exponent = exp_first ? r0 : r1;
+ Register rhs_exponent = exp_first ? r2 : r3;
+ Register lhs_mantissa = exp_first ? r1 : r0;
+ Register rhs_mantissa = exp_first ? r3 : r2;
+
+ // r0, r1, r2, r3 have the two doubles. Neither is a NaN.
+ if (cc == eq) {
+ // Doubles are not equal unless they have the same bit pattern.
+ // Exception: 0 and -0.
+ __ cmp(lhs_mantissa, Operand(rhs_mantissa));
+ __ orr(r0, lhs_mantissa, Operand(rhs_mantissa), LeaveCC, ne);
+ // Return non-zero if the numbers are unequal.
+ __ mov(pc, Operand(lr), LeaveCC, ne);
+
+ __ sub(r0, lhs_exponent, Operand(rhs_exponent), SetCC);
+ // If exponents are equal then return 0.
+ __ mov(pc, Operand(lr), LeaveCC, eq);
+
+ // Exponents are unequal. The only way we can return that the numbers
+ // are equal is if one is -0 and the other is 0. We already dealt
+ // with the case where both are -0 or both are 0.
+ // We start by seeing if the mantissas (that are equal) or the bottom
+ // 31 bits of the rhs exponent are non-zero. If so we return not
+ // equal.
+ __ orr(r4, rhs_mantissa, Operand(rhs_exponent, LSL, kSmiTagSize), SetCC);
+ __ mov(r0, Operand(r4), LeaveCC, ne);
+ __ mov(pc, Operand(lr), LeaveCC, ne); // Return conditionally.
+ // Now they are equal if and only if the lhs exponent is zero in its
+ // low 31 bits.
+ __ mov(r0, Operand(lhs_exponent, LSL, kSmiTagSize));
+ __ mov(pc, Operand(lr));
+ } else {
+ // Call a native function to do a comparison between two non-NaNs.
+ // Call C routine that may not cause GC or other trouble.
+ __ mov(r5, Operand(ExternalReference::compare_doubles()));
+ __ Jump(r5); // Tail call.
+ }
+}
+
+
+// See comment at call site.
+static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm) {
+ // If either operand is a JSObject or an oddball value, then they are
+ // not equal since their pointers are different.
+ // There is no test for undetectability in strict equality.
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ Label first_non_object;
+ // Get the type of the first operand into r2 and compare it with
+ // FIRST_JS_OBJECT_TYPE.
+ __ CompareObjectType(r0, r2, r2, FIRST_JS_OBJECT_TYPE);
+ __ b(lt, &first_non_object);
+
+ // Return non-zero (r0 is not zero)
+ Label return_not_equal;
+ __ bind(&return_not_equal);
+ __ mov(pc, Operand(lr)); // Return.
+
+ __ bind(&first_non_object);
+ // Check for oddballs: true, false, null, undefined.
+ __ cmp(r2, Operand(ODDBALL_TYPE));
+ __ b(eq, &return_not_equal);
+
+ __ CompareObjectType(r1, r3, r3, FIRST_JS_OBJECT_TYPE);
+ __ b(ge, &return_not_equal);
+
+ // Check for oddballs: true, false, null, undefined.
+ __ cmp(r3, Operand(ODDBALL_TYPE));
+ __ b(eq, &return_not_equal);
+}
+
+
+// See comment at call site.
+static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
+ Label* both_loaded_as_doubles,
+ Label* not_heap_numbers,
+ Label* slow) {
+ __ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE);
+ __ b(ne, not_heap_numbers);
+ __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
+ __ b(ne, slow); // First was a heap number, second wasn't. Go slow case.
+
+ // Both are heap numbers. Load them up then jump to the code we have
+ // for that.
+ __ ldr(r2, FieldMemOperand(r1, HeapNumber::kValueOffset));
+ __ ldr(r3, FieldMemOperand(r1, HeapNumber::kValueOffset + kPointerSize));
+ __ ldr(r1, FieldMemOperand(r0, HeapNumber::kValueOffset + kPointerSize));
+ __ ldr(r0, FieldMemOperand(r0, HeapNumber::kValueOffset));
+ __ jmp(both_loaded_as_doubles);
+}
+
+
+// Fast negative check for symbol-to-symbol equality.
+static void EmitCheckForSymbols(MacroAssembler* masm, Label* slow) {
+ // r2 is object type of r0.
+ __ tst(r2, Operand(kIsNotStringMask));
+ __ b(ne, slow);
+ __ tst(r2, Operand(kIsSymbolMask));
+ __ b(eq, slow);
+ __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
+ __ b(ge, slow);
+ __ tst(r3, Operand(kIsSymbolMask));
+ __ b(eq, slow);
+
+ // Both are symbols. We already checked they weren't the same pointer
+ // so they are not equal.
+ __ mov(r0, Operand(1)); // Non-zero indicates not equal.
+ __ mov(pc, Operand(lr)); // Return.
+}
+
+
+// On entry r0 and r1 are the things to be compared. On exit r0 is 0,
+// positive or negative to indicate the result of the comparison.
+void CompareStub::Generate(MacroAssembler* masm) {
+ Label slow; // Call builtin.
+ Label not_smis, both_loaded_as_doubles, rhs_not_nan;
+
+ // NOTICE! This code is only reached after a smi-fast-case check, so
+ // it is certain that at least one operand isn't a smi.
+
+ // Handle the case where the objects are identical. Either returns the answer
+ // or goes to slow. Only falls through if the objects were not identical.
+ EmitIdenticalObjectComparison(masm, &slow, cc_);
+
+ // If either is a Smi (we know that not both are), then they can only
+ // be strictly equal if the other is a HeapNumber.
+ ASSERT_EQ(0, kSmiTag);
+ ASSERT_EQ(0, Smi::FromInt(0));
+ __ and_(r2, r0, Operand(r1));
+ __ tst(r2, Operand(kSmiTagMask));
+  __ b(ne, &not_smis);
+ // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
+ // 1) Return the answer.
+ // 2) Go to slow.
+ // 3) Fall through to both_loaded_as_doubles.
+ // 4) Jump to rhs_not_nan.
+ // In cases 3 and 4 we have found out we were dealing with a number-number
+ // comparison and the numbers have been loaded into r0, r1, r2, r3 as doubles.
+ EmitSmiNonsmiComparison(masm, &rhs_not_nan, &slow, strict_);
+
+ __ bind(&both_loaded_as_doubles);
+ // r0, r1, r2, r3 are the double representations of the left hand side
+ // and the right hand side.
+
+ // Checks for NaN in the doubles we have loaded. Can return the answer or
+ // fall through if neither is a NaN. Also binds rhs_not_nan.
+ EmitNanCheck(masm, &rhs_not_nan, cc_);
+
+ // Compares two doubles in r0, r1, r2, r3 that are not NaNs. Returns the
+ // answer. Never falls through.
+ EmitTwoNonNanDoubleComparison(masm, cc_);
+
+  __ bind(&not_smis);
+ // At this point we know we are dealing with two different objects,
+ // and neither of them is a Smi. The objects are in r0 and r1.
+ if (strict_) {
+ // This returns non-equal for some object types, or falls through if it
+ // was not lucky.
+ EmitStrictTwoHeapObjectCompare(masm);
+ }
+
+ Label check_for_symbols;
+ // Check for heap-number-heap-number comparison. Can jump to slow case,
+ // or load both doubles into r0, r1, r2, r3 and jump to the code that handles
+ // that case. If the inputs are not doubles then jumps to check_for_symbols.
+ // In this case r2 will contain the type of r0.
+ EmitCheckForTwoHeapNumbers(masm,
+ &both_loaded_as_doubles,
+ &check_for_symbols,
+ &slow);
+
+ __ bind(&check_for_symbols);
+ if (cc_ == eq) {
+ // Either jumps to slow or returns the answer. Assumes that r2 is the type
+ // of r0 on entry.
+ EmitCheckForSymbols(masm, &slow);
+ }
+
+ __ bind(&slow);
+ __ push(lr);
+ __ push(r1);
+ __ push(r0);
+ // Figure out which native to call and setup the arguments.
+ Builtins::JavaScript native;
+ int arg_count = 1; // Not counting receiver.
+ if (cc_ == eq) {
+ native = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
+ } else {
+ native = Builtins::COMPARE;
+ int ncr; // NaN compare result
+ if (cc_ == lt || cc_ == le) {
+ ncr = GREATER;
+ } else {
+ ASSERT(cc_ == gt || cc_ == ge); // remaining cases
+ ncr = LESS;
+ }
+ arg_count++;
+ __ mov(r0, Operand(Smi::FromInt(ncr)));
+ __ push(r0);
+ }
+
+ // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
+ // tagged as a small integer.
+ __ mov(r0, Operand(arg_count));
+ __ InvokeBuiltin(native, CALL_JS);
+ __ cmp(r0, Operand(0));
+ __ pop(pc);
+}
+
+
// Allocates a heap number or jumps to the label if the young space is full and
// a scavenge is needed.
static void AllocateHeapNumber(
@@ -4538,7 +4952,8 @@
// The new heap number is in r5. r6 and r7 are scratch.
AllocateHeapNumber(masm, &slow, r5, r6, r7);
// Write Smi from r0 to r3 and r2 in double format. r6 is scratch.
- ConvertToDoubleStub stub1(r3, r2, r0, r6);
+ __ mov(r7, Operand(r0));
+ ConvertToDoubleStub stub1(r3, r2, r7, r6);
__ push(lr);
__ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
// Write Smi from r1 to r1 and r0 in double format. r6 is scratch.
@@ -4919,7 +5334,13 @@
__ tst(r3, Operand(r3));
__ mov(r0, Operand(r3), LeaveCC, ne);
__ Ret(ne);
- // Slow case.
+ // We need -0 if we were multiplying a negative number with 0 to get 0.
+ // We know one of them was zero.
+ __ add(r2, r0, Operand(r1), SetCC);
+ __ mov(r0, Operand(Smi::FromInt(0)), LeaveCC, pl);
+ __ Ret(pl); // Return Smi 0 if the non-zero one was positive.
+ // Slow case. We fall through here if we multiplied a negative number
+ // with 0, because that would mean we should produce -0.
__ bind(&slow);
HandleBinaryOpSlowCases(masm,
@@ -5015,7 +5436,6 @@
void UnarySubStub::Generate(MacroAssembler* masm) {
Label undo;
Label slow;
- Label done;
Label not_smi;
// Enter runtime system if the value is not a smi.
@@ -5041,9 +5461,6 @@
__ mov(r0, Operand(0)); // Set number of arguments.
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
- __ bind(&done);
- __ StubReturn(1);
-
+  __ bind(&not_smi);
__ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
__ b(ne, &slow);
@@ -5203,9 +5620,9 @@
// support moving the C entry code stub. This should be fixed, but currently
// this is OK because the CEntryStub gets generated so early in the V8 boot
// sequence that it is not moving ever.
- __ add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4
- __ push(lr);
- __ Jump(r5);
+ masm->add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4
+ masm->push(lr);
+ masm->Jump(r5);
if (always_allocate) {
// It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
@@ -5629,6 +6046,13 @@
}
+int CompareStub::MinorKey() {
+ // Encode the two parameters in a unique 16 bit value.
+ ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15));
+ return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0);
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/arm/codegen-arm.h b/src/arm/codegen-arm.h
index 4fab900..7760e47 100644
--- a/src/arm/codegen-arm.h
+++ b/src/arm/codegen-arm.h
@@ -292,7 +292,10 @@
void ToBoolean(JumpTarget* true_target, JumpTarget* false_target);
void GenericBinaryOperation(Token::Value op, OverwriteMode overwrite_mode);
- void Comparison(Condition cc, bool strict = false);
+ void Comparison(Condition cc,
+ Expression* left,
+ Expression* right,
+ bool strict = false);
void SmiOperation(Token::Value op,
Handle<Object> value,
@@ -333,11 +336,15 @@
void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
void GenerateIsArray(ZoneList<Expression*>* args);
+ // Support for construct call checks.
+ void GenerateIsConstructCall(ZoneList<Expression*>* args);
+
// Support for arguments.length and arguments[?].
void GenerateArgumentsLength(ZoneList<Expression*>* args);
void GenerateArgumentsAccess(ZoneList<Expression*>* args);
- // Support for accessing the value field of an object (used by Date).
+ // Support for accessing the class and value fields of an object.
+ void GenerateClassOf(ZoneList<Expression*>* args);
void GenerateValueOf(ZoneList<Expression*>* args);
void GenerateSetValueOf(ZoneList<Expression*>* args);
diff --git a/src/arm/constants-arm.h b/src/arm/constants-arm.h
index d5f967f..f0311df 100644
--- a/src/arm/constants-arm.h
+++ b/src/arm/constants-arm.h
@@ -36,6 +36,19 @@
# define USE_ARM_EABI 1
#endif
+// This means that interwork-compatible jump instructions are generated. We
+// want to generate them on the simulator too so it makes snapshots that can
+// be used on real hardware.
+#if defined(__THUMB_INTERWORK__) || !defined(__arm__)
+# define USE_THUMB_INTERWORK 1
+#endif
+
+// Simulator should support ARM5 instructions.
+#if !defined(__arm__)
+# define __ARM_ARCH_5__ 1
+# define __ARM_ARCH_5T__ 1
+#endif
+
namespace assembler {
namespace arm {
@@ -97,6 +110,24 @@
};
+// Some special instructions encoded as a TEQ with S=0 (bit 20).
+enum Opcode9Bits {
+ BX = 1,
+ BXJ = 2,
+ BLX = 3,
+ BKPT = 7
+};
+
+
+// Some special instructions encoded as a CMN with S=0 (bit 20).
+enum Opcode11Bits {
+ CLZ = 1
+};
+
+
+// S
+
+
// Shifter types for Data-processing operands as defined in section A5.1.2.
enum Shift {
no_shift = -1,
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index 8083ce3..588732b 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -438,7 +438,7 @@
return 6;
}
case 'u': { // 'u: signed or unsigned multiplies
- if (instr->Bit(22) == 0) {
+ if (instr->Bit(22) == 1) {
Print("u");
} else {
Print("s");
@@ -499,7 +499,7 @@
Format(instr, "mla'cond's 'rd, 'rm, 'rs, 'rn");
}
} else {
- Format(instr, "'um'al'cond's 'rn, 'rd, 'rs, 'rm");
+ Format(instr, "'um'al'cond's 'rn, 'rd, 'rm, 'rs");
}
} else {
Unknown(instr); // not used by V8
@@ -593,7 +593,17 @@
if (instr->HasS()) {
Format(instr, "teq'cond 'rn, 'shift_op");
} else {
- Unknown(instr); // not used by V8
+ switch (instr->Bits(7, 4)) {
+ case BX:
+ Format(instr, "bx'cond 'rm");
+ break;
+ case BLX:
+ Format(instr, "blx'cond 'rm");
+ break;
+ default:
+ Unknown(instr); // not used by V8
+ break;
+ }
}
break;
}
@@ -609,7 +619,14 @@
if (instr->HasS()) {
Format(instr, "cmn'cond 'rn, 'shift_op");
} else {
- Unknown(instr); // not used by V8
+ switch (instr->Bits(7, 4)) {
+ case CLZ:
+ Format(instr, "clz'cond 'rd, 'rm");
+ break;
+ default:
+ Unknown(instr); // not used by V8
+ break;
+ }
}
break;
}
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 8b4e087..5519771 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -67,11 +67,15 @@
// Load the map into t0.
__ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset));
// Test the has_named_interceptor bit in the map.
- __ ldr(t0, FieldMemOperand(t1, Map::kInstanceAttributesOffset));
- __ tst(t0, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8))));
+ __ ldr(r3, FieldMemOperand(t0, Map::kInstanceAttributesOffset));
+ __ tst(r3, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8))));
// Jump to miss if the interceptor bit is set.
__ b(ne, miss);
+ // Bail out if we have a JS global object.
+ __ ldrb(r3, FieldMemOperand(t0, Map::kInstanceTypeOffset));
+ __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE));
+ __ b(eq, miss);
// Check that the properties array is a dictionary.
__ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 897b5a7..3d6b8cb 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -46,14 +46,14 @@
// We always generate arm code, never thumb code, even if V8 is compiled to
// thumb, so we require inter-working support
-#if defined(__thumb__) && !defined(__THUMB_INTERWORK__)
+#if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
#error "flag -mthumb-interwork missing"
#endif
// We do not support thumb inter-working with an arm architecture not supporting
// the blx instruction (below v5t)
-#if defined(__THUMB_INTERWORK__)
+#if defined(USE_THUMB_INTERWORK)
#if !defined(__ARM_ARCH_5T__) && \
!defined(__ARM_ARCH_5TE__) && \
!defined(__ARM_ARCH_7A__) && \
@@ -65,12 +65,12 @@
// Using blx may yield better code, so use it when required or when available
-#if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__)
+#if defined(USE_THUMB_INTERWORK) || defined(__ARM_ARCH_5__)
#define USE_BLX 1
#endif
// Using bx does not yield better code, so use it only when required
-#if defined(__THUMB_INTERWORK__)
+#if defined(USE_THUMB_INTERWORK)
#define USE_BX 1
#endif
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index af4f28e..53dbec9 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -1046,6 +1046,9 @@
int64_t result = target(arg0, arg1, arg2, arg3);
int32_t lo_res = static_cast<int32_t>(result);
int32_t hi_res = static_cast<int32_t>(result >> 32);
+ if (::v8::internal::FLAG_trace_sim) {
+ PrintF("Returned %08x\n", lo_res);
+ }
set_register(r0, lo_res);
set_register(r1, hi_res);
set_register(r0, result);
@@ -1357,7 +1360,21 @@
SetNZFlags(alu_out);
SetCFlag(shifter_carry_out);
} else {
- UNIMPLEMENTED();
+ ASSERT(type == 0);
+ int rm = instr->RmField();
+ switch (instr->Bits(7, 4)) {
+ case BX:
+ set_pc(get_register(rm));
+ break;
+ case BLX: {
+ uint32_t old_pc = get_pc();
+ set_pc(get_register(rm));
+ set_register(lr, old_pc + Instr::kInstrSize);
+ break;
+ }
+ default:
+ UNIMPLEMENTED();
+ }
}
break;
}
@@ -1381,7 +1398,27 @@
Format(instr, "cmn'cond 'rn, 'shift_rm");
Format(instr, "cmn'cond 'rn, 'imm");
} else {
- UNIMPLEMENTED();
+ ASSERT(type == 0);
+ int rm = instr->RmField();
+ int rd = instr->RdField();
+ switch (instr->Bits(7, 4)) {
+ case CLZ: {
+ uint32_t bits = get_register(rm);
+ int leading_zeros = 0;
+ if (bits == 0) {
+ leading_zeros = 32;
+ } else {
+ while ((bits & 0x80000000u) == 0) {
+ bits <<= 1;
+ leading_zeros++;
+ }
+ }
+ set_register(rd, leading_zeros);
+ break;
+ }
+ default:
+ UNIMPLEMENTED();
+ }
}
break;
}
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 7824557..44e6478 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -496,9 +496,7 @@
Object* CallStubCompiler::CompileCallField(Object* object,
JSObject* holder,
int index,
- String* name,
- Code::Flags flags) {
- ASSERT_EQ(FIELD, Code::ExtractTypeFromFlags(flags));
+ String* name) {
// ----------- S t a t e -------------
// -- lr: return address
// -----------------------------------
@@ -540,16 +538,14 @@
__ Jump(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCodeWithFlags(flags, name);
+ return GetCode(FIELD, name);
}
Object* CallStubCompiler::CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
- CheckType check,
- Code::Flags flags) {
- ASSERT_EQ(CONSTANT_FUNCTION, Code::ExtractTypeFromFlags(flags));
+ CheckType check) {
// ----------- S t a t e -------------
// -- lr: return address
// -----------------------------------
@@ -648,6 +644,7 @@
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count());
__ InvokeCode(code, expected, arguments(),
@@ -663,7 +660,7 @@
if (function->shared()->name()->IsString()) {
function_name = String::cast(function->shared()->name());
}
- return GetCodeWithFlags(flags, function_name);
+ return GetCode(CONSTANT_FUNCTION, function_name);
}
@@ -687,6 +684,61 @@
}
+Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- lr: return address
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::call_global_inline, 1, r1, r3);
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ // Check that the map of the global has not changed.
+ __ ldr(r2, MemOperand(sp, argc * kPointerSize));
+ __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Handle<Map>(object->map())));
+ __ b(ne, &miss);
+
+ // Get the value from the cell.
+ __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
+
+ // Check that the cell contains the same function.
+ __ cmp(r1, Operand(Handle<JSFunction>(function)));
+ __ b(ne, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ __ ldr(r3, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
+ __ str(r3, MemOperand(sp, argc * kPointerSize));
+
+ // Setup the context (function already in r1).
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments(),
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::call_global_inline, 1, r1, r3);
+ __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
int index,
Map* transition,
@@ -827,6 +879,45 @@
}
+Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r2 : name
+ // -- lr : return address
+ // -- [sp] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
+
+ // Check that the map of the global has not changed.
+ __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Handle<Map>(object->map())));
+ __ b(ne, &miss);
+
+ // Store the value in the cell.
+ __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ __ mov(r1, Operand(JSGlobalPropertyCell::kValueOffset));
+ __ RecordWrite(r2, r1, r3);
+
+ __ Ret();
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::named_store_global_inline, 1, r1, r3);
+ __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r1, r3);
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
Object* LoadStubCompiler::CompileLoadField(JSObject* object,
JSObject* holder,
int index,
@@ -921,6 +1012,47 @@
}
+Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name,
+ bool is_dont_delete) {
+ // ----------- S t a t e -------------
+ // -- r2 : name
+ // -- lr : return address
+ // -- [sp] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
+
+ // Check that the map of the global has not changed.
+ __ ldr(r1, MemOperand(sp, 0 * kPointerSize));
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Handle<Map>(object->map())));
+ __ b(ne, &miss);
+
+ // Get the value from the cell.
+ __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
+ __ ldr(r0, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
+
+ // Check for deleted property if property can actually be deleted.
+ if (!is_dont_delete) {
+ __ cmp(r0, Operand(Factory::the_hole_value()));
+ __ b(eq, &miss);
+ }
+
+ __ Ret();
+
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
+ __ IncrementCounter(&Counters::named_load_global_inline_miss, 1, r1, r3);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
// TODO(1224671): IC stubs for keyed loads have not been implemented
// for ARM.
Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
diff --git a/src/assembler.cc b/src/assembler.cc
index 7b7778c..9497be8 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -608,6 +608,12 @@
}
+static int native_compare_doubles(double x, double y) {
+ if (x == y) return 0;
+ return x < y ? 1 : -1;
+}
+
+
ExternalReference ExternalReference::double_fp_operation(
Token::Value operation) {
typedef double BinaryFPOperation(double x, double y);
@@ -630,6 +636,12 @@
}
+ExternalReference ExternalReference::compare_doubles() {
+ return ExternalReference(Redirect(FUNCTION_ADDR(native_compare_doubles),
+ false));
+}
+
+
ExternalReferenceRedirector* ExternalReference::redirector_ = NULL;
diff --git a/src/assembler.h b/src/assembler.h
index 979dd90..879ee54 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -413,6 +413,7 @@
static ExternalReference new_space_allocation_limit_address();
static ExternalReference double_fp_operation(Token::Value operation);
+ static ExternalReference compare_doubles();
Address address() const {return reinterpret_cast<Address>(address_);}
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 3810c6a..f3c7c5f 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -539,7 +539,7 @@
{ // --- G l o b a l ---
// Step 1: create a fresh inner JSGlobalObject
- Handle<JSGlobalObject> object;
+ Handle<GlobalObject> object;
{
Handle<JSFunction> js_global_function;
Handle<ObjectTemplateInfo> js_global_template;
@@ -579,9 +579,7 @@
}
js_global_function->initial_map()->set_is_hidden_prototype();
- SetExpectedNofProperties(js_global_function, 100);
- object = Handle<JSGlobalObject>::cast(
- Factory::NewJSObject(js_global_function, TENURED));
+ object = Factory::NewGlobalObject(js_global_function);
}
// Set the global context for the global object.
@@ -963,12 +961,10 @@
Handle<String> name = Factory::LookupAsciiSymbol("builtins");
builtins_fun->shared()->set_instance_class_name(*name);
- SetExpectedNofProperties(builtins_fun, 100);
// Allocate the builtins object.
Handle<JSBuiltinsObject> builtins =
- Handle<JSBuiltinsObject>::cast(Factory::NewJSObject(builtins_fun,
- TENURED));
+ Handle<JSBuiltinsObject>::cast(Factory::NewGlobalObject(builtins_fun));
builtins->set_builtins(*builtins);
builtins->set_global_context(*global_context());
builtins->set_global_receiver(*builtins);
@@ -1113,8 +1109,8 @@
}
#ifdef V8_HOST_ARCH_64_BIT
- // TODO(X64): Remove this test when code generation works and is stable.
- CodeGenerator::TestCodeGenerator();
+ // TODO(X64): Remove this when inline caches work.
+ FLAG_use_ic = false;
#endif // V8_HOST_ARCH_64_BIT
@@ -1191,10 +1187,6 @@
apply->shared()->set_length(2);
}
- // Make sure that the builtins object has fast properties.
- // If the ASSERT below fails, please increase the expected number of
- // properties for the builtins object.
- ASSERT(builtins->HasFastProperties());
#ifdef DEBUG
builtins->Verify();
#endif
@@ -1214,6 +1206,17 @@
Handle<JSObject>(js_global->builtins()), DONT_ENUM);
}
+ if (FLAG_capture_stack_traces) {
+ Handle<Object> Error = GetProperty(js_global, "Error");
+ if (Error->IsJSObject()) {
+ Handle<String> name = Factory::LookupAsciiSymbol("captureStackTraces");
+ SetProperty(Handle<JSObject>::cast(Error),
+ name,
+ Factory::true_value(),
+ NONE);
+ }
+ }
+
#ifdef ENABLE_DEBUGGER_SUPPORT
// Expose the debug global object in global if a name for it is specified.
if (FLAG_expose_debug_as != NULL && strlen(FLAG_expose_debug_as) != 0) {
@@ -1445,6 +1448,9 @@
// Set the property.
Handle<String> key = Handle<String>(String::cast(raw_key));
Handle<Object> value = Handle<Object>(properties->ValueAt(i));
+ if (value->IsJSGlobalPropertyCell()) {
+ value = Handle<Object>(JSGlobalPropertyCell::cast(*value)->value());
+ }
PropertyDetails details = properties->DetailsAt(i);
SetProperty(to, key, value, details.attributes());
}
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index f4d8ce8..ee60332 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -133,6 +133,10 @@
return "InvokeBuiltin";
case JSExit:
return "JSExit";
+ case ConvertToDouble:
+ return "ConvertToDouble";
+ case WriteInt32ToHeapNumber:
+ return "WriteInt32ToHeapNumber";
default:
UNREACHABLE();
return NULL;
diff --git a/src/codegen.cc b/src/codegen.cc
index ad5b1ea..b7297d7 100644
--- a/src/codegen.cc
+++ b/src/codegen.cc
@@ -416,8 +416,10 @@
{&CodeGenerator::GenerateIsSmi, "_IsSmi"},
{&CodeGenerator::GenerateIsNonNegativeSmi, "_IsNonNegativeSmi"},
{&CodeGenerator::GenerateIsArray, "_IsArray"},
+ {&CodeGenerator::GenerateIsConstructCall, "_IsConstructCall"},
{&CodeGenerator::GenerateArgumentsLength, "_ArgumentsLength"},
{&CodeGenerator::GenerateArgumentsAccess, "_Arguments"},
+ {&CodeGenerator::GenerateClassOf, "_ClassOf"},
{&CodeGenerator::GenerateValueOf, "_ValueOf"},
{&CodeGenerator::GenerateSetValueOf, "_SetValueOf"},
{&CodeGenerator::GenerateFastCharCodeAt, "_FastCharCodeAt"},
diff --git a/src/codegen.h b/src/codegen.h
index fa414d4..243d87c 100644
--- a/src/codegen.h
+++ b/src/codegen.h
@@ -77,6 +77,8 @@
#include "x64/codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/codegen-arm.h"
+#else
+#error Unsupported target architecture.
#endif
#include "register-allocator.h"
@@ -249,6 +251,36 @@
};
+class CompareStub: public CodeStub {
+ public:
+ CompareStub(Condition cc, bool strict) : cc_(cc), strict_(strict) { }
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ Condition cc_;
+ bool strict_;
+
+ Major MajorKey() { return Compare; }
+
+ int MinorKey();
+
+ // Branch to the label if the given object isn't a symbol.
+ void BranchIfNonSymbol(MacroAssembler* masm,
+ Label* label,
+ Register object,
+ Register scratch);
+
+#ifdef DEBUG
+ void Print() {
+ PrintF("CompareStub (cc %d), (strict %s)\n",
+ static_cast<int>(cc_),
+ strict_ ? "true" : "false");
+ }
+#endif
+};
+
+
class CEntryStub : public CodeStub {
public:
CEntryStub() { }
diff --git a/src/date-delay.js b/src/date-delay.js
index 0a89783..5a109c6 100644
--- a/src/date-delay.js
+++ b/src/date-delay.js
@@ -28,7 +28,6 @@
// This file relies on the fact that the following declarations have been made
// in v8natives.js:
-// const $isNaN = GlobalIsNaN;
// const $isFinite = GlobalIsFinite;
// -------------------------------------------------------------------
@@ -41,6 +40,11 @@
// changes to these properties.
const $Date = global.Date;
+// Helper function to throw error.
+function ThrowDateTypeError() {
+ throw new $TypeError('this is not a Date object.');
+}
+
// ECMA 262 - 15.9.1.2
function Day(time) {
return FLOOR(time/msPerDay);
@@ -232,7 +236,7 @@
var local_time_offset = %DateLocalTimeOffset();
function LocalTime(time) {
- if ($isNaN(time)) return time;
+ if (NUMBER_IS_NAN(time)) return time;
return time + local_time_offset + DaylightSavingsOffset(time);
}
@@ -242,7 +246,7 @@
function UTC(time) {
- if ($isNaN(time)) return time;
+ if (NUMBER_IS_NAN(time)) return time;
var tmp = time - local_time_offset;
return tmp - DaylightSavingsOffset(tmp);
}
@@ -424,7 +428,7 @@
%SetCode($Date, function(year, month, date, hours, minutes, seconds, ms) {
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
// ECMA 262 - 15.9.3
var argc = %_ArgumentsLength();
if (argc == 0) {
@@ -454,7 +458,7 @@
minutes = argc > 4 ? ToNumber(minutes) : 0;
seconds = argc > 5 ? ToNumber(seconds) : 0;
ms = argc > 6 ? ToNumber(ms) : 0;
- year = (!$isNaN(year) && 0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
+ year = (!NUMBER_IS_NAN(year) && 0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, month, date);
var time = MakeTime(hours, minutes, seconds, ms);
@@ -468,106 +472,105 @@
// Helper functions.
function GetTimeFrom(aDate) {
- if (IS_DATE(aDate)) return %_ValueOf(aDate);
- throw new $TypeError('this is not a Date object.');
+ return DATE_VALUE(aDate);
}
function GetMillisecondsFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return msFromTime(LocalTimeNoCheck(t));
}
function GetUTCMillisecondsFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return msFromTime(t);
}
function GetSecondsFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return SecFromTime(LocalTimeNoCheck(t));
}
function GetUTCSecondsFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return SecFromTime(t);
}
function GetMinutesFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return MinFromTime(LocalTimeNoCheck(t));
}
function GetUTCMinutesFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return MinFromTime(t);
}
function GetHoursFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return HourFromTime(LocalTimeNoCheck(t));
}
function GetUTCHoursFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return HourFromTime(t);
}
function GetFullYearFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
// Ignore the DST offset for year computations.
return YearFromTime(t + local_time_offset);
}
function GetUTCFullYearFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return YearFromTime(t);
}
function GetMonthFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return MonthFromTime(LocalTimeNoCheck(t));
}
function GetUTCMonthFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return MonthFromTime(t);
}
function GetDateFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return DateFromTime(LocalTimeNoCheck(t));
}
function GetUTCDateFrom(aDate) {
- var t = GetTimeFrom(aDate);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(aDate);
+ if (NUMBER_IS_NAN(t)) return t;
return DateFromTime(t);
}
@@ -659,7 +662,7 @@
minutes = argc > 4 ? ToNumber(minutes) : 0;
seconds = argc > 5 ? ToNumber(seconds) : 0;
ms = argc > 6 ? ToNumber(ms) : 0;
- year = (!$isNaN(year) && 0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
+ year = (!NUMBER_IS_NAN(year) && 0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, month, date);
var time = MakeTime(hours, minutes, seconds, ms);
@@ -676,24 +679,24 @@
// ECMA 262 - 15.9.5.2
function DateToString() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return kInvalidDate;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return kInvalidDate;
return DatePrintString(LocalTimeNoCheck(t)) + LocalTimezoneString(t);
}
// ECMA 262 - 15.9.5.3
function DateToDateString() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return kInvalidDate;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return kInvalidDate;
return DateString(LocalTimeNoCheck(t));
}
// ECMA 262 - 15.9.5.4
function DateToTimeString() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return kInvalidDate;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return kInvalidDate;
var lt = LocalTimeNoCheck(t);
return TimeString(lt) + LocalTimezoneString(lt);
}
@@ -707,16 +710,16 @@
// ECMA 262 - 15.9.5.6
function DateToLocaleDateString() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return kInvalidDate;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return kInvalidDate;
return LongDateString(LocalTimeNoCheck(t));
}
// ECMA 262 - 15.9.5.7
function DateToLocaleTimeString() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return kInvalidDate;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return kInvalidDate;
var lt = LocalTimeNoCheck(t);
return TimeString(lt);
}
@@ -724,13 +727,13 @@
// ECMA 262 - 15.9.5.8
function DateValueOf() {
- return GetTimeFrom(this);
+ return DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.9
function DateGetTime() {
- return GetTimeFrom(this);
+ return DATE_VALUE(this);
}
@@ -772,16 +775,16 @@
// ECMA 262 - 15.9.5.16
function DateGetDay() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return t;
+ var t = %_ValueOf(this);
+ if (NUMBER_IS_NAN(t)) return t;
return WeekDay(LocalTimeNoCheck(t));
}
// ECMA 262 - 15.9.5.17
function DateGetUTCDay() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return t;
+ var t = %_ValueOf(this);
+ if (NUMBER_IS_NAN(t)) return t;
return WeekDay(t);
}
@@ -836,22 +839,22 @@
// ECMA 262 - 15.9.5.26
function DateGetTimezoneOffset() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return t;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return t;
return (t - LocalTimeNoCheck(t)) / msPerMinute;
}
// ECMA 262 - 15.9.5.27
function DateSetTime(ms) {
- if (!IS_DATE(this)) throw new $TypeError('this is not a Date object.');
+ if (!IS_DATE(this)) ThrowDateTypeError();
return %_SetValueOf(this, TimeClip(ToNumber(ms)));
}
// ECMA 262 - 15.9.5.28
function DateSetMilliseconds(ms) {
- var t = LocalTime(GetTimeFrom(this));
+ var t = LocalTime(DATE_VALUE(this));
ms = ToNumber(ms);
var time = MakeTime(HourFromTime(t), MinFromTime(t), SecFromTime(t), ms);
return %_SetValueOf(this, TimeClip(UTC(MakeDate(Day(t), time))));
@@ -860,7 +863,7 @@
// ECMA 262 - 15.9.5.29
function DateSetUTCMilliseconds(ms) {
- var t = GetTimeFrom(this);
+ var t = DATE_VALUE(this);
ms = ToNumber(ms);
var time = MakeTime(HourFromTime(t), MinFromTime(t), SecFromTime(t), ms);
return %_SetValueOf(this, TimeClip(MakeDate(Day(t), time)));
@@ -869,7 +872,7 @@
// ECMA 262 - 15.9.5.30
function DateSetSeconds(sec, ms) {
- var t = LocalTime(GetTimeFrom(this));
+ var t = LocalTime(DATE_VALUE(this));
sec = ToNumber(sec);
ms = %_ArgumentsLength() < 2 ? GetMillisecondsFrom(this) : ToNumber(ms);
var time = MakeTime(HourFromTime(t), MinFromTime(t), sec, ms);
@@ -879,7 +882,7 @@
// ECMA 262 - 15.9.5.31
function DateSetUTCSeconds(sec, ms) {
- var t = GetTimeFrom(this);
+ var t = DATE_VALUE(this);
sec = ToNumber(sec);
ms = %_ArgumentsLength() < 2 ? GetUTCMillisecondsFrom(this) : ToNumber(ms);
var time = MakeTime(HourFromTime(t), MinFromTime(t), sec, ms);
@@ -889,7 +892,7 @@
// ECMA 262 - 15.9.5.33
function DateSetMinutes(min, sec, ms) {
- var t = LocalTime(GetTimeFrom(this));
+ var t = LocalTime(DATE_VALUE(this));
min = ToNumber(min);
var argc = %_ArgumentsLength();
sec = argc < 2 ? GetSecondsFrom(this) : ToNumber(sec);
@@ -901,7 +904,7 @@
// ECMA 262 - 15.9.5.34
function DateSetUTCMinutes(min, sec, ms) {
- var t = GetTimeFrom(this);
+ var t = DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
sec = argc < 2 ? GetUTCSecondsFrom(this) : ToNumber(sec);
@@ -913,7 +916,7 @@
// ECMA 262 - 15.9.5.35
function DateSetHours(hour, min, sec, ms) {
- var t = LocalTime(GetTimeFrom(this));
+ var t = LocalTime(DATE_VALUE(this));
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
min = argc < 2 ? GetMinutesFrom(this) : ToNumber(min);
@@ -926,7 +929,7 @@
// ECMA 262 - 15.9.5.34
function DateSetUTCHours(hour, min, sec, ms) {
- var t = GetTimeFrom(this);
+ var t = DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
min = argc < 2 ? GetUTCMinutesFrom(this) : ToNumber(min);
@@ -939,7 +942,7 @@
// ECMA 262 - 15.9.5.36
function DateSetDate(date) {
- var t = LocalTime(GetTimeFrom(this));
+ var t = LocalTime(DATE_VALUE(this));
date = ToNumber(date);
var day = MakeDay(YearFromTime(t), MonthFromTime(t), date);
return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
@@ -948,7 +951,7 @@
// ECMA 262 - 15.9.5.37
function DateSetUTCDate(date) {
- var t = GetTimeFrom(this);
+ var t = DATE_VALUE(this);
date = ToNumber(date);
var day = MakeDay(YearFromTime(t), MonthFromTime(t), date);
return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
@@ -957,7 +960,7 @@
// ECMA 262 - 15.9.5.38
function DateSetMonth(month, date) {
- var t = LocalTime(GetTimeFrom(this));
+ var t = LocalTime(DATE_VALUE(this));
month = ToNumber(month);
date = %_ArgumentsLength() < 2 ? GetDateFrom(this) : ToNumber(date);
var day = MakeDay(YearFromTime(t), month, date);
@@ -967,7 +970,7 @@
// ECMA 262 - 15.9.5.39
function DateSetUTCMonth(month, date) {
- var t = GetTimeFrom(this);
+ var t = DATE_VALUE(this);
month = ToNumber(month);
date = %_ArgumentsLength() < 2 ? GetUTCDateFrom(this) : ToNumber(date);
var day = MakeDay(YearFromTime(t), month, date);
@@ -977,8 +980,8 @@
// ECMA 262 - 15.9.5.40
function DateSetFullYear(year, month, date) {
- var t = GetTimeFrom(this);
- t = $isNaN(t) ? 0 : LocalTimeNoCheck(t);
+ var t = DATE_VALUE(this);
+ t = NUMBER_IS_NAN(t) ? 0 : LocalTimeNoCheck(t);
year = ToNumber(year);
var argc = %_ArgumentsLength();
month = argc < 2 ? MonthFromTime(t) : ToNumber(month);
@@ -990,8 +993,8 @@
// ECMA 262 - 15.9.5.41
function DateSetUTCFullYear(year, month, date) {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) t = 0;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) t = 0;
var argc = %_ArgumentsLength();
year = ToNumber(year);
month = argc < 2 ? MonthFromTime(t) : ToNumber(month);
@@ -1003,8 +1006,8 @@
// ECMA 262 - 15.9.5.42
function DateToUTCString() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return kInvalidDate;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return kInvalidDate;
// Return UTC string of the form: Sat, 31 Jan 1970 23:00:00 GMT
return WeekDays[WeekDay(t)] + ', '
+ TwoDigitString(DateFromTime(t)) + ' '
@@ -1016,18 +1019,18 @@
// ECMA 262 - B.2.4
function DateGetYear() {
- var t = GetTimeFrom(this);
- if ($isNaN(t)) return $NaN;
+ var t = DATE_VALUE(this);
+ if (NUMBER_IS_NAN(t)) return $NaN;
return YearFromTime(LocalTimeNoCheck(t)) - 1900;
}
// ECMA 262 - B.2.5
function DateSetYear(year) {
- var t = LocalTime(GetTimeFrom(this));
- if ($isNaN(t)) t = 0;
+ var t = LocalTime(DATE_VALUE(this));
+ if (NUMBER_IS_NAN(t)) t = 0;
year = ToNumber(year);
- if ($isNaN(year)) return %_SetValueOf(this, $NaN);
+ if (NUMBER_IS_NAN(year)) return %_SetValueOf(this, $NaN);
year = (0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, MonthFromTime(t), DateFromTime(t));
diff --git a/src/disassembler.cc b/src/disassembler.cc
index 95022d0..e2f908d 100644
--- a/src/disassembler.cc
+++ b/src/disassembler.cc
@@ -239,6 +239,13 @@
InlineCacheState ic_state = code->ic_state();
out.AddFormatted(" %s, %s", Code::Kind2String(kind),
Code::ICState2String(ic_state));
+ if (ic_state == MONOMORPHIC) {
+ PropertyType type = code->type();
+ out.AddFormatted(", %s", Code::PropertyType2String(type));
+ }
+ if (code->ic_in_loop() == IN_LOOP) {
+ out.AddFormatted(", in_loop");
+ }
if (kind == Code::CALL_IC) {
out.AddFormatted(", argc = %d", code->arguments_count());
}
diff --git a/src/execution.cc b/src/execution.cc
index fa3c2ec..adc1872 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -38,6 +38,8 @@
#include "x64/simulator-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/simulator-arm.h"
+#else
+#error Unsupported target architecture.
#endif
#include "debug.h"
diff --git a/src/factory.cc b/src/factory.cc
index fe19873..216a07e 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -619,6 +619,14 @@
}
+Handle<GlobalObject> Factory::NewGlobalObject(
+ Handle<JSFunction> constructor) {
+ CALL_HEAP_FUNCTION(Heap::AllocateGlobalObject(*constructor),
+ GlobalObject);
+}
+
+
+
Handle<JSObject> Factory::NewJSObjectFromMap(Handle<Map> map) {
CALL_HEAP_FUNCTION(Heap::AllocateJSObjectFromMap(*map, NOT_TENURED),
JSObject);
diff --git a/src/factory.h b/src/factory.h
index 95dbee9..1ec9f1b 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -183,6 +183,9 @@
static Handle<JSObject> NewJSObject(Handle<JSFunction> constructor,
PretenureFlag pretenure = NOT_TENURED);
+ // Global objects are pretenured.
+ static Handle<GlobalObject> NewGlobalObject(Handle<JSFunction> constructor);
+
// JS objects are pretenured when allocated by the bootstrapper and
// runtime.
static Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 8110e12..983fe22 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -110,6 +110,7 @@
DEFINE_string(expose_debug_as, NULL, "expose debug in global object")
DEFINE_string(natives_file, NULL, "alternative natives file")
DEFINE_bool(expose_gc, false, "expose gc extension")
+DEFINE_bool(capture_stack_traces, false, "capture stack traces")
// builtins-ia32.cc
DEFINE_bool(inline_new, true, "use fast inline allocation")
diff --git a/src/frames-inl.h b/src/frames-inl.h
index 0e2adb9..b04cf50 100644
--- a/src/frames-inl.h
+++ b/src/frames-inl.h
@@ -36,6 +36,8 @@
#include "x64/frames-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/frames-arm.h"
+#else
+#error Unsupported target architecture.
#endif
namespace v8 {
diff --git a/src/heap.cc b/src/heap.cc
index bf6fccd..4a317e3 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -79,7 +79,7 @@
// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
-#if V8_HOST_ARCH_ARM
+#if V8_TARGET_ARCH_ARM
int Heap::semispace_size_ = 512*KB;
int Heap::old_generation_size_ = 128*MB;
int Heap::initial_semispace_size_ = 128*KB;
@@ -221,6 +221,7 @@
// NewSpace statistics are logged exactly once when --log-gc is turned on.
#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
if (FLAG_heap_stats) {
+ new_space_.CollectStatistics();
ReportHeapStatistics("After GC");
} else if (FLAG_log_gc) {
new_space_.ReportStatistics();
@@ -428,22 +429,8 @@
old_gen_allocation_limit_ =
old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
old_gen_exhausted_ = false;
-
- // If we have used the mark-compact collector to collect the new
- // space, and it has not compacted the new space, we force a
- // separate scavenge collection. This is a hack. It covers the
- // case where (1) a new space collection was requested, (2) the
- // collector selection policy selected the mark-compact collector,
- // and (3) the mark-compact collector policy selected not to
- // compact the new space. In that case, there is no more (usable)
- // free space in the new space after the collection compared to
- // before.
- if (space == NEW_SPACE && !MarkCompactCollector::HasCompacted()) {
- Scavenge();
- }
- } else {
- Scavenge();
}
+ Scavenge();
Counters::objs_since_last_young.Set(0);
PostGarbageCollectionProcessing();
@@ -1070,6 +1057,11 @@
if (obj->IsFailure()) return false;
oddball_map_ = Map::cast(obj);
+ obj = AllocatePartialMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
+ JSGlobalPropertyCell::kSize);
+ if (obj->IsFailure()) return false;
+ global_property_cell_map_ = Map::cast(obj);
+
// Allocate the empty array
obj = AllocateEmptyFixedArray();
if (obj->IsFailure()) return false;
@@ -1095,6 +1087,10 @@
oddball_map()->set_instance_descriptors(empty_descriptor_array());
oddball_map()->set_code_cache(empty_fixed_array());
+ global_property_cell_map()->set_instance_descriptors(
+ empty_descriptor_array());
+ global_property_cell_map()->set_code_cache(empty_fixed_array());
+
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
meta_map()->set_constructor(null_value());
@@ -1104,6 +1100,9 @@
oddball_map()->set_prototype(null_value());
oddball_map()->set_constructor(null_value());
+ global_property_cell_map()->set_prototype(null_value());
+ global_property_cell_map()->set_constructor(null_value());
+
obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
if (obj->IsFailure()) return false;
heap_number_map_ = Map::cast(obj);
@@ -1230,6 +1229,17 @@
}
+Object* Heap::AllocateJSGlobalPropertyCell(Object* value) {
+ Object* result = AllocateRaw(JSGlobalPropertyCell::kSize,
+ OLD_POINTER_SPACE,
+ OLD_POINTER_SPACE);
+ if (result->IsFailure()) return result;
+ HeapObject::cast(result)->set_map(global_property_cell_map());
+ JSGlobalPropertyCell::cast(result)->set_value(value);
+ return result;
+}
+
+
Object* Heap::CreateOddball(Map* map,
const char* to_string,
Object* to_number) {
@@ -1257,28 +1267,49 @@
return true;
}
+
+void Heap::CreateCEntryStub() {
+ CEntryStub stub;
+ c_entry_code_ = *stub.GetCode();
+}
+
+
+void Heap::CreateCEntryDebugBreakStub() {
+ CEntryDebugBreakStub stub;
+ c_entry_debug_break_code_ = *stub.GetCode();
+}
+
+
+void Heap::CreateJSEntryStub() {
+ JSEntryStub stub;
+ js_entry_code_ = *stub.GetCode();
+}
+
+
+void Heap::CreateJSConstructEntryStub() {
+ JSConstructEntryStub stub;
+ js_construct_entry_code_ = *stub.GetCode();
+}
+
+
void Heap::CreateFixedStubs() {
// Here we create roots for fixed stubs. They are needed at GC
// for cooking and uncooking (check out frames.cc).
// The eliminates the need for doing dictionary lookup in the
// stub cache for these stubs.
HandleScope scope;
- {
- CEntryStub stub;
- c_entry_code_ = *stub.GetCode();
- }
- {
- CEntryDebugBreakStub stub;
- c_entry_debug_break_code_ = *stub.GetCode();
- }
- {
- JSEntryStub stub;
- js_entry_code_ = *stub.GetCode();
- }
- {
- JSConstructEntryStub stub;
- js_construct_entry_code_ = *stub.GetCode();
- }
+ // gcc-4.4 has a problem generating correct code for the following snippet:
+ // { CEntryStub stub;
+ // c_entry_code_ = *stub.GetCode();
+ // }
+ // { CEntryDebugBreakStub stub;
+ // c_entry_debug_break_code_ = *stub.GetCode();
+ // }
+ // To work around the problem, make separate functions without inlining.
+ Heap::CreateCEntryStub();
+ Heap::CreateCEntryDebugBreakStub();
+ Heap::CreateJSEntryStub();
+ Heap::CreateJSConstructEntryStub();
}
@@ -1514,7 +1545,7 @@
Object* Heap::AllocateSharedFunctionInfo(Object* name) {
- Object* result = Allocate(shared_function_info_map(), NEW_SPACE);
+ Object* result = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
if (result->IsFailure()) return result;
SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
@@ -2006,7 +2037,7 @@
// Allocate the backing storage for the properties.
int prop_size = map->unused_property_fields() - map->inobject_properties();
- Object* properties = AllocateFixedArray(prop_size);
+ Object* properties = AllocateFixedArray(prop_size, pretenure);
if (properties->IsFailure()) return properties;
// Allocate the JSObject.
@@ -2034,7 +2065,39 @@
Map::cast(initial_map)->set_constructor(constructor);
}
// Allocate the object based on the constructors initial map.
- return AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
+ Object* result =
+ AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
+ // Make sure result is NOT a global object if valid.
+ ASSERT(result->IsFailure() || !result->IsGlobalObject());
+ return result;
+}
+
+
+Object* Heap::AllocateGlobalObject(JSFunction* constructor) {
+ ASSERT(constructor->has_initial_map());
+ // Make sure no field properties are described in the initial map.
+ // This guarantees us that normalizing the properties does not
+ // require us to change property values to JSGlobalPropertyCells.
+ ASSERT(constructor->initial_map()->NextFreePropertyIndex() == 0);
+
+ // Make sure we don't have a ton of pre-allocated slots in the
+ // global objects. They will be unused once we normalize the object.
+ ASSERT(constructor->initial_map()->unused_property_fields() == 0);
+ ASSERT(constructor->initial_map()->inobject_properties() == 0);
+
+ // Allocate the object based on the constructor's initial map.
+ Object* result = AllocateJSObjectFromMap(constructor->initial_map(), TENURED);
+ if (result->IsFailure()) return result;
+
+ // Normalize the result.
+ JSObject* global = JSObject::cast(result);
+ result = global->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
+ if (result->IsFailure()) return result;
+
+ // Make sure result is a global object with properties in dictionary.
+ ASSERT(global->IsGlobalObject());
+ ASSERT(!global->HasFastProperties());
+ return global;
}
@@ -2111,7 +2174,7 @@
// Allocate the backing storage for the properties.
int prop_size = map->unused_property_fields() - map->inobject_properties();
- Object* properties = AllocateFixedArray(prop_size);
+ Object* properties = AllocateFixedArray(prop_size, TENURED);
if (properties->IsFailure()) return properties;
// Reset the map for the object.
diff --git a/src/heap.h b/src/heap.h
index 31adcbd..77cea1b 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -99,6 +99,7 @@
V(Map, global_context_map) \
V(Map, code_map) \
V(Map, oddball_map) \
+ V(Map, global_property_cell_map) \
V(Map, boilerplate_function_map) \
V(Map, shared_function_info_map) \
V(Map, proxy_map) \
@@ -288,6 +289,12 @@
static Object* AllocateJSObject(JSFunction* constructor,
PretenureFlag pretenure = NOT_TENURED);
+ // Allocates and initializes a new global object based on a constructor.
+ // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
+ // failed.
+ // Please note this does not perform a garbage collection.
+ static Object* AllocateGlobalObject(JSFunction* constructor);
+
// Returns a deep copy of the JavaScript object.
// Properties and elements are copied too.
// Returns failure if allocation failed.
@@ -408,6 +415,12 @@
// Please note this does not perform a garbage collection.
static Object* AllocateByteArray(int length);
+ // Allocate a tenured JS global property cell.
+ // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
+ // failed.
+ // Please note this does not perform a garbage collection.
+ static Object* AllocateJSGlobalPropertyCell(Object* value);
+
// Allocates a fixed array initialized with undefined values
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
@@ -920,7 +933,15 @@
static bool CreateInitialMaps();
static bool CreateInitialObjects();
+
+ // These four Create*EntryStub functions are here because of a gcc-4.4 bug
+ // that assigns wrong vtable entries.
+ static void CreateCEntryStub();
+ static void CreateCEntryDebugBreakStub();
+ static void CreateJSEntryStub();
+ static void CreateJSConstructEntryStub();
static void CreateFixedStubs();
+
static Object* CreateOddball(Map* map,
const char* to_string,
Object* to_number);
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 5968610..f3cb854 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -919,6 +919,14 @@
}
+void Assembler::imul(Register reg) {
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ EMIT(0xF7);
+ EMIT(0xE8 | reg.code());
+}
+
+
void Assembler::imul(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index 92c390c..70b510e 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -544,15 +544,18 @@
void idiv(Register src);
- void imul(Register dst, const Operand& src);
- void imul(Register dst, Register src, int32_t imm32);
+ // Signed multiply instructions.
+ void imul(Register src); // edx:eax = eax * src.
+ void imul(Register dst, const Operand& src); // dst = dst * src.
+ void imul(Register dst, Register src, int32_t imm32); // dst = src * imm32.
void inc(Register dst);
void inc(const Operand& dst);
void lea(Register dst, const Operand& src);
- void mul(Register src);
+ // Unsigned multiply instruction.
+ void mul(Register src); // edx:eax = eax * src.
void neg(Register dst);
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 59c1d45..3b2eaa0 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -1856,40 +1856,6 @@
}
-class CompareStub: public CodeStub {
- public:
- CompareStub(Condition cc, bool strict) : cc_(cc), strict_(strict) { }
-
- void Generate(MacroAssembler* masm);
-
- private:
- Condition cc_;
- bool strict_;
-
- Major MajorKey() { return Compare; }
-
- int MinorKey() {
- // Encode the three parameters in a unique 16 bit value.
- ASSERT(static_cast<int>(cc_) < (1 << 15));
- return (static_cast<int>(cc_) << 1) | (strict_ ? 1 : 0);
- }
-
- // Branch to the label if the given object isn't a symbol.
- void BranchIfNonSymbol(MacroAssembler* masm,
- Label* label,
- Register object,
- Register scratch);
-
-#ifdef DEBUG
- void Print() {
- PrintF("CompareStub (cc %d), (strict %s)\n",
- static_cast<int>(cc_),
- strict_ ? "true" : "false");
- }
-#endif
-};
-
-
void CodeGenerator::Comparison(Condition cc,
bool strict,
ControlDestination* dest) {
@@ -4987,6 +4953,29 @@
}
+void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 0);
+
+ // Get the frame pointer for the calling frame.
+ Result fp = allocator()->Allocate();
+ __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+
+ // Skip the arguments adaptor frame if it exists.
+ Label check_frame_marker;
+ __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+ Immediate(ArgumentsAdaptorFrame::SENTINEL));
+ __ j(not_equal, &check_frame_marker);
+ __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
+
+ // Check the marker in the calling frame.
+ __ bind(&check_frame_marker);
+ __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
+ Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
+ fp.Unuse();
+ destination()->Split(equal);
+}
+
+
void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
// ArgumentsAccessStub takes the parameter count as an input argument
@@ -4999,6 +4988,70 @@
}
+void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 1);
+ JumpTarget leave, null, function, non_function_constructor;
+ Load(args->at(0)); // Load the object.
+ Result obj = frame_->Pop();
+ obj.ToRegister();
+ frame_->Spill(obj.reg());
+
+ // If the object is a smi, we return null.
+ __ test(obj.reg(), Immediate(kSmiTagMask));
+ null.Branch(zero);
+
+ // Check that the object is a JS object but take special care of JS
+ // functions to make sure they have 'Function' as their class.
+ { Result tmp = allocator()->Allocate();
+ __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
+ __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
+ __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE);
+ null.Branch(less);
+
+ // As long as JS_FUNCTION_TYPE is the last instance type and it is
+ // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
+ // LAST_JS_OBJECT_TYPE.
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
+ __ cmp(tmp.reg(), JS_FUNCTION_TYPE);
+ function.Branch(equal);
+ }
+
+ // Check if the constructor in the map is a function.
+ { Result tmp = allocator()->Allocate();
+ __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
+ __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
+ non_function_constructor.Branch(not_equal);
+ }
+
+ // The map register now contains the constructor function. Grab the
+ // instance class name from there.
+ __ mov(obj.reg(),
+ FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
+ __ mov(obj.reg(),
+ FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
+ frame_->Push(&obj);
+ leave.Jump();
+
+ // Functions have class 'Function'.
+ function.Bind();
+ frame_->Push(Factory::function_class_symbol());
+ leave.Jump();
+
+ // Objects with a non-function constructor have class 'Object'.
+ non_function_constructor.Bind();
+ frame_->Push(Factory::Object_symbol());
+ leave.Jump();
+
+ // Non-JS objects have class null.
+ null.Bind();
+ frame_->Push(Factory::null_value());
+
+ // All done.
+ leave.Bind();
+}
+
+
void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
JumpTarget leave;
@@ -7856,6 +7909,12 @@
}
+int CompareStub::MinorKey() {
+ // Encode the two parameters in a unique 16 bit value.
+ ASSERT(static_cast<unsigned>(cc_) < (1 << 15));
+ return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0);
+}
+
#undef __
} } // namespace v8::internal
diff --git a/src/ia32/codegen-ia32.h b/src/ia32/codegen-ia32.h
index d25d07c..5cd50b8 100644
--- a/src/ia32/codegen-ia32.h
+++ b/src/ia32/codegen-ia32.h
@@ -522,11 +522,15 @@
void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
void GenerateIsArray(ZoneList<Expression*>* args);
+ // Support for construct call checks.
+ void GenerateIsConstructCall(ZoneList<Expression*>* args);
+
// Support for arguments.length and arguments[?].
void GenerateArgumentsLength(ZoneList<Expression*>* args);
void GenerateArgumentsAccess(ZoneList<Expression*>* args);
- // Support for accessing the value field of an object (used by Date).
+ // Support for accessing the class and value fields of an object.
+ void GenerateClassOf(ZoneList<Expression*>* args);
void GenerateValueOf(ZoneList<Expression*>* args);
void GenerateSetValueOf(ZoneList<Expression*>* args);
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 1ba4757..004dad2 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -66,9 +66,15 @@
// Test the has_named_interceptor bit in the map.
__ test(FieldOperand(r0, Map::kInstanceAttributesOffset),
Immediate(1 << (Map::kHasNamedInterceptor + (3 * 8))));
+
// Jump to miss if the interceptor bit is set.
__ j(not_zero, miss_label, not_taken);
+ // Bail out if we have a JS global object.
+ __ movzx_b(r0, FieldOperand(r0, Map::kInstanceTypeOffset));
+ __ cmp(r0, JS_GLOBAL_PROXY_TYPE);
+ __ j(equal, miss_label, not_taken);
+
// Check that the properties array is a dictionary.
__ mov(r0, FieldOperand(r1, JSObject::kPropertiesOffset));
__ cmp(FieldOperand(r0, HeapObject::kMapOffset),
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index b31f706..6f84edc 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -475,9 +475,7 @@
Object* CallStubCompiler::CompileCallField(Object* object,
JSObject* holder,
int index,
- String* name,
- Code::Flags flags) {
- ASSERT_EQ(FIELD, Code::ExtractTypeFromFlags(flags));
+ String* name) {
// ----------- S t a t e -------------
// -----------------------------------
Label miss;
@@ -518,16 +516,14 @@
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
- return GetCodeWithFlags(flags, name);
+ return GetCode(FIELD, name);
}
Object* CallStubCompiler::CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
- CheckType check,
- Code::Flags flags) {
- ASSERT_EQ(CONSTANT_FUNCTION, Code::ExtractTypeFromFlags(flags));
+ CheckType check) {
// ----------- S t a t e -------------
// -----------------------------------
Label miss;
@@ -627,6 +623,7 @@
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count());
__ InvokeCode(code, expected, arguments(),
@@ -642,7 +639,7 @@
if (function->shared()->name()->IsString()) {
function_name = String::cast(function->shared()->name());
}
- return GetCodeWithFlags(flags, function_name);
+ return GetCode(CONSTANT_FUNCTION, function_name);
}
@@ -718,6 +715,59 @@
}
+Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::call_global_inline, 1);
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ // Check that the map of the global has not changed.
+ __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
+ __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
+ Immediate(Handle<Map>(object->map())));
+ __ j(not_equal, &miss, not_taken);
+
+ // Get the value from the cell.
+ __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
+ __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
+
+ // Check that the cell contains the same function.
+ __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
+ __ j(not_equal, &miss, not_taken);
+
+ // Patch the receiver on the stack with the global proxy.
+ __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
+ __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
+
+ // Setup the context (function already in edi).
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments(),
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::call_global_inline, 1);
+ __ IncrementCounter(&Counters::call_global_inline_miss, 1);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
int index,
Map* transition,
@@ -861,6 +911,49 @@
}
+Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[4] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::named_store_global_inline, 1);
+
+ // Check that the map of the global has not changed.
+ __ mov(ebx, (Operand(esp, kPointerSize)));
+ __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
+ Immediate(Handle<Map>(object->map())));
+ __ j(not_equal, &miss, not_taken);
+
+ // Store the value in the cell.
+ __ mov(ecx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
+ __ mov(FieldOperand(ecx, JSGlobalPropertyCell::kValueOffset), eax);
+
+ // RecordWrite clobbers the value register. Pass the value being stored in
+ // edx.
+ __ mov(edx, eax);
+ __ RecordWrite(ecx, JSGlobalPropertyCell::kValueOffset, edx, ebx);
+
+ // Return the value (register eax).
+ __ ret(0);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::named_store_global_inline, 1);
+ __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
int index,
Map* transition,
@@ -999,6 +1092,47 @@
}
+Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name,
+ bool is_dont_delete) {
+ // ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[4] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::named_load_global_inline, 1);
+
+ // Check that the map of the global has not changed.
+ __ mov(eax, (Operand(esp, kPointerSize)));
+ __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
+ Immediate(Handle<Map>(object->map())));
+ __ j(not_equal, &miss, not_taken);
+
+ // Get the value from the cell.
+ __ mov(eax, Immediate(Handle<JSGlobalPropertyCell>(cell)));
+ __ mov(eax, FieldOperand(eax, JSGlobalPropertyCell::kValueOffset));
+
+ // Check for deleted property if property can actually be deleted.
+ if (!is_dont_delete) {
+ __ cmp(eax, Factory::the_hole_value());
+ __ j(equal, &miss, not_taken);
+ }
+
+ __ ret(0);
+
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::named_load_global_inline, 1);
+ __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
+}
+
+
Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
JSObject* receiver,
JSObject* holder,
diff --git a/src/ic.cc b/src/ic.cc
index 35c4036..43dc1df 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -265,6 +265,39 @@
}
+static bool HasInterceptorGetter(JSObject* object) {
+ return !object->GetNamedInterceptor()->getter()->IsUndefined();
+}
+
+
+static void LookupForRead(Object* object,
+ String* name,
+ LookupResult* lookup) {
+ object->Lookup(name, lookup);
+ if (lookup->IsNotFound() || lookup->type() != INTERCEPTOR) {
+ return;
+ }
+
+ JSObject* holder = lookup->holder();
+ if (HasInterceptorGetter(holder)) {
+ return;
+ }
+
+ // There is no getter, just skip it and continue the lookup down the proto chain.
+ holder->LocalLookupRealNamedProperty(name, lookup);
+ if (lookup->IsValid()) {
+ return;
+ }
+
+ Object* proto = holder->GetPrototype();
+ if (proto == Heap::null_value()) {
+ return;
+ }
+
+ LookupForRead(proto, name, lookup);
+}
+
+
Object* CallIC::TryCallAsFunction(Object* object) {
HandleScope scope;
Handle<Object> target(object);
@@ -312,7 +345,7 @@
// Lookup the property in the object.
LookupResult lookup;
- object->Lookup(*name, &lookup);
+ LookupForRead(*object, *name, &lookup);
if (!lookup.IsValid()) {
// If the object does not have the requested property, check which
@@ -328,11 +361,11 @@
UpdateCaches(&lookup, state, object, name);
}
+ // Get the property.
+ PropertyAttributes attr;
+ result = object->GetProperty(*object, &lookup, *name, &attr);
+ if (result->IsFailure()) return result;
if (lookup.type() == INTERCEPTOR) {
- // Get the property.
- PropertyAttributes attr;
- result = object->GetProperty(*name, &attr);
- if (result->IsFailure()) return result;
// If the object does not have the requested property, check which
// exception we need to throw.
if (attr == ABSENT) {
@@ -341,11 +374,6 @@
}
return TypeError("undefined_method", object, name);
}
- } else {
- // Lookup is valid and no interceptors are involved. Get the
- // property.
- result = object->GetProperty(*name);
- if (result->IsFailure()) return result;
}
ASSERT(result != Heap::the_hole_value());
@@ -423,17 +451,33 @@
break;
}
case NORMAL: {
- // There is only one shared stub for calling normalized
- // properties. It does not traverse the prototype chain, so the
- // property must be found in the receiver for the stub to be
- // applicable.
if (!object->IsJSObject()) return;
- Handle<JSObject> receiver = Handle<JSObject>::cast(object);
- if (lookup->holder() != *receiver) return;
- code = StubCache::ComputeCallNormal(argc, in_loop, *name, *receiver);
+ if (object->IsGlobalObject()) {
+ // The stub generated for the global object picks the value directly
+ // from the property cell. So the property must be directly on the
+ // global object.
+ Handle<GlobalObject> global = Handle<GlobalObject>::cast(object);
+ if (lookup->holder() != *global) return;
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
+ if (cell->value()->IsJSFunction()) {
+ JSFunction* function = JSFunction::cast(cell->value());
+ code = StubCache::ComputeCallGlobal(argc, in_loop, *name, *global,
+ cell, function);
+ }
+ } else {
+ // There is only one shared stub for calling normalized
+ // properties. It does not traverse the prototype chain, so the
+ // property must be found in the receiver for the stub to be
+ // applicable.
+ Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+ if (lookup->holder() != *receiver) return;
+ code = StubCache::ComputeCallNormal(argc, in_loop, *name, *receiver);
+ }
break;
}
case INTERCEPTOR: {
+ ASSERT(HasInterceptorGetter(lookup->holder()));
code = StubCache::ComputeCallInterceptor(argc, *name, *object,
lookup->holder());
break;
@@ -520,7 +564,7 @@
// Named lookup in the object.
LookupResult lookup;
- object->Lookup(*name, &lookup);
+ LookupForRead(*object, *name, &lookup);
// If lookup is invalid, check if we need to throw an exception.
if (!lookup.IsValid()) {
@@ -614,12 +658,24 @@
break;
}
case NORMAL: {
- // There is only one shared stub for loading normalized
- // properties. It does not traverse the prototype chain, so the
- // property must be found in the receiver for the stub to be
- // applicable.
- if (lookup->holder() != *receiver) return;
- code = StubCache::ComputeLoadNormal(*name, *receiver);
+ if (object->IsGlobalObject()) {
+ // The stub generated for the global object picks the value directly
+ // from the property cell. So the property must be directly on the
+ // global object.
+ Handle<GlobalObject> global = Handle<GlobalObject>::cast(object);
+ if (lookup->holder() != *global) return;
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
+ code = StubCache::ComputeLoadGlobal(*name, *global,
+ cell, lookup->IsDontDelete());
+ } else {
+ // There is only one shared stub for loading normalized
+ // properties. It does not traverse the prototype chain, so the
+ // property must be found in the receiver for the stub to be
+ // applicable.
+ if (lookup->holder() != *receiver) return;
+ code = StubCache::ComputeLoadNormal(*name, *receiver);
+ }
break;
}
case CALLBACKS: {
@@ -632,6 +688,7 @@
break;
}
case INTERCEPTOR: {
+ ASSERT(HasInterceptorGetter(lookup->holder()));
code = StubCache::ComputeLoadInterceptor(*name, *receiver,
lookup->holder());
break;
@@ -723,7 +780,7 @@
// Named lookup.
LookupResult lookup;
- object->Lookup(*name, &lookup);
+ LookupForRead(*object, *name, &lookup);
// If lookup is invalid, check if we need to throw an exception.
if (!lookup.IsValid()) {
@@ -817,6 +874,7 @@
break;
}
case INTERCEPTOR: {
+ ASSERT(HasInterceptorGetter(lookup->holder()));
code = StubCache::ComputeKeyedLoadInterceptor(*name, *receiver,
lookup->holder());
break;
@@ -863,9 +921,9 @@
}
-static bool LookupForStoreIC(JSObject* object,
- String* name,
- LookupResult* lookup) {
+static bool LookupForWrite(JSObject* object,
+ String* name,
+ LookupResult* lookup) {
object->LocalLookup(name, lookup);
if (!StoreICableLookup(lookup)) {
return false;
@@ -908,7 +966,7 @@
// Lookup the property locally in the receiver.
if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
LookupResult lookup;
- if (LookupForStoreIC(*receiver, *name, &lookup)) {
+ if (LookupForWrite(*receiver, *name, &lookup)) {
UpdateCaches(&lookup, state, receiver, name, value);
}
}
@@ -953,6 +1011,19 @@
code = StubCache::ComputeStoreField(*name, *receiver, index, *transition);
break;
}
+ case NORMAL: {
+ if (!receiver->IsGlobalObject()) {
+ return;
+ }
+ // The stub generated for the global object picks the value directly
+ // from the property cell. So the property must be directly on the
+ // global object.
+ Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
+ code = StubCache::ComputeStoreGlobal(*name, *global, cell);
+ break;
+ }
case CALLBACKS: {
if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
@@ -961,6 +1032,7 @@
break;
}
case INTERCEPTOR: {
+ ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
code = StubCache::ComputeStoreInterceptor(*name, *receiver);
break;
}
diff --git a/src/interpreter-irregexp.cc b/src/interpreter-irregexp.cc
index 355fae4..6d616ab 100644
--- a/src/interpreter-irregexp.cc
+++ b/src/interpreter-irregexp.cc
@@ -115,17 +115,17 @@
}
-#define BYTECODE(name) \
- case BC_##name: \
- TraceInterpreter(code_base, \
- pc, \
- backtrack_sp - backtrack_stack, \
- current, \
- current_char, \
- BC_##name##_LENGTH, \
+#define BYTECODE(name) \
+ case BC_##name: \
+ TraceInterpreter(code_base, \
+ pc, \
+ backtrack_sp - backtrack_stack_base, \
+ current, \
+ current_char, \
+ BC_##name##_LENGTH, \
#name);
#else
-#define BYTECODE(name) \
+#define BYTECODE(name) \
case BC_##name:
#endif
@@ -150,9 +150,12 @@
uint32_t current_char) {
const byte* pc = code_base;
static const int kBacktrackStackSize = 10000;
- int backtrack_stack[kBacktrackStackSize];
+ // Use a SmartPointer here to ensure that the memory gets freed when the
+ // matching finishes.
+ SmartPointer<int> backtrack_stack(NewArray<int>(kBacktrackStackSize));
+ int* backtrack_stack_base = *backtrack_stack;
+ int* backtrack_sp = backtrack_stack_base;
int backtrack_stack_space = kBacktrackStackSize;
- int* backtrack_sp = backtrack_stack;
#ifdef DEBUG
if (FLAG_trace_regexp_bytecodes) {
PrintF("\n\nStart bytecode interpreter\n\n");
@@ -202,13 +205,13 @@
pc += BC_SET_CP_TO_REGISTER_LENGTH;
break;
BYTECODE(SET_REGISTER_TO_SP)
- registers[insn >> BYTECODE_SHIFT] = backtrack_sp - backtrack_stack;
+ registers[insn >> BYTECODE_SHIFT] = backtrack_sp - backtrack_stack_base;
pc += BC_SET_REGISTER_TO_SP_LENGTH;
break;
BYTECODE(SET_SP_TO_REGISTER)
- backtrack_sp = backtrack_stack + registers[insn >> BYTECODE_SHIFT];
+ backtrack_sp = backtrack_stack_base + registers[insn >> BYTECODE_SHIFT];
backtrack_stack_space = kBacktrackStackSize -
- (backtrack_sp - backtrack_stack);
+ (backtrack_sp - backtrack_stack_base);
pc += BC_SET_SP_TO_REGISTER_LENGTH;
break;
BYTECODE(POP_CP)
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index 7b294ec..879f671 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -51,6 +51,8 @@
#include "x64/regexp-macro-assembler-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/regexp-macro-assembler-arm.h"
+#else
+#error Unsupported target architecture.
#endif
#include "interpreter-irregexp.h"
diff --git a/src/log.cc b/src/log.cc
index 0dba08d..2ca89dd 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -176,8 +176,11 @@
~Ticker() { if (IsActive()) Stop(); }
+ void SampleStack(TickSample* sample) {
+ StackTracer::Trace(sample);
+ }
+
void Tick(TickSample* sample) {
- if (IsProfiling()) StackTracer::Trace(sample);
if (profiler_) profiler_->Insert(sample);
if (window_) window_->AddState(sample->state);
}
diff --git a/src/macro-assembler.h b/src/macro-assembler.h
index 116381b..983802e 100644
--- a/src/macro-assembler.h
+++ b/src/macro-assembler.h
@@ -47,6 +47,8 @@
#include "arm/assembler-arm-inl.h"
#include "code.h" // must be after assembler_*.h
#include "arm/macro-assembler-arm.h"
+#else
+#error Unsupported target architecture.
#endif
#endif // V8_MACRO_ASSEMBLER_H_
diff --git a/src/macros.py b/src/macros.py
index fdbdb58..c75f0ea 100644
--- a/src/macros.py
+++ b/src/macros.py
@@ -82,13 +82,16 @@
macro IS_STRING(arg) = (typeof(arg) === 'string');
macro IS_OBJECT(arg) = (typeof(arg) === 'object');
macro IS_BOOLEAN(arg) = (typeof(arg) === 'boolean');
-macro IS_REGEXP(arg) = %HasRegExpClass(arg);
-macro IS_ARRAY(arg) = %HasArrayClass(arg);
-macro IS_DATE(arg) = %HasDateClass(arg);
-macro IS_NUMBER_WRAPPER(arg) = %HasNumberClass(arg);
-macro IS_STRING_WRAPPER(arg) = %HasStringClass(arg);
-macro IS_ERROR(arg) = (%ClassOf(arg) === 'Error');
-macro IS_SCRIPT(arg) = (%ClassOf(arg) === 'Script');
+macro IS_ARRAY(arg) = (%_IsArray(arg));
+macro IS_REGEXP(arg) = (%_ClassOf(arg) === 'RegExp');
+macro IS_DATE(arg) = (%_ClassOf(arg) === 'Date');
+macro IS_NUMBER_WRAPPER(arg) = (%_ClassOf(arg) === 'Number');
+macro IS_STRING_WRAPPER(arg) = (%_ClassOf(arg) === 'String');
+macro IS_BOOLEAN_WRAPPER(arg) = (%_ClassOf(arg) === 'Boolean');
+macro IS_ERROR(arg) = (%_ClassOf(arg) === 'Error');
+macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script');
+macro IS_ARGUMENTS(arg) = (%_ClassOf(arg) === 'Arguments');
+macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global');
macro FLOOR(arg) = %Math_floor(arg);
# Inline macros. Use %IS_VAR to make sure arg is evaluated only once.
@@ -111,6 +114,10 @@
# REGEXP_NUMBER_OF_CAPTURES
macro NUMBER_OF_CAPTURES(array) = ((array)[0]);
+# Gets the value of a Date object. If arg is not a Date object
+# a type error is thrown.
+macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
+
# Last input and last subject are after the captures so we can omit them on
# results returned from global searches. Beware - these evaluate their
# arguments twice.
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 89d97e9..5e46f2a 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -1141,7 +1141,7 @@
// We give non-live objects a map that will correctly give their size,
// since their existing map might not be live after the collection.
int size = object->Size();
- if (size >= Array::kHeaderSize) {
+ if (size >= ByteArray::kHeaderSize) {
object->set_map(Heap::byte_array_map());
ByteArray::cast(object)->set_length(ByteArray::LengthFor(size));
} else {
diff --git a/src/messages.js b/src/messages.js
index ec4b352..882fed5 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -557,55 +557,9 @@
function GetStackTraceLine(recv, fun, pos, isGlobal) {
- try {
- return UnsafeGetStackTraceLine(recv, fun, pos, isGlobal);
- } catch (e) {
- return "<error: " + e + ">";
- }
+ return FormatSourcePosition(new CallSite(recv, fun, pos));
}
-
-function GetFunctionName(fun, recv) {
- var name = %FunctionGetName(fun);
- if (name) return name;
- for (var prop in recv) {
- if (recv[prop] === fun)
- return prop;
- }
- return "[anonymous]";
-}
-
-
-function UnsafeGetStackTraceLine(recv, fun, pos, isTopLevel) {
- var result = "";
- // The global frame has no meaningful function or receiver
- if (!isTopLevel) {
- // If the receiver is not the global object then prefix the
- // message send
- if (recv !== global)
- result += ToDetailString(recv) + ".";
- result += GetFunctionName(fun, recv);
- }
- if (pos != -1) {
- var script = %FunctionGetScript(fun);
- var file;
- if (script) {
- file = %FunctionGetScript(fun).data;
- }
- if (file) {
- var location = %FunctionGetScript(fun).locationFromPosition(pos, true);
- if (!isTopLevel) result += "(";
- result += file;
- if (location != null) {
- result += ":" + (location.line + 1) + ":" + (location.column + 1);
- }
- if (!isTopLevel) result += ")";
- }
- }
- return (result) ? " at " + result : result;
-}
-
-
// ----------------------------------------------------------------------------
// Error implementation
@@ -632,6 +586,197 @@
});
}
+function CallSite(receiver, fun, pos) {
+ this.receiver = receiver;
+ this.fun = fun;
+ this.pos = pos;
+}
+
+CallSite.prototype.getThis = function () {
+ return this.receiver;
+};
+
+CallSite.prototype.getTypeName = function () {
+ var constructor = this.receiver.constructor;
+ if (!constructor)
+ return $Object.prototype.toString.call(this.receiver);
+ var constructorName = constructor.name;
+ if (!constructorName)
+ return $Object.prototype.toString.call(this.receiver);
+ return constructorName;
+};
+
+CallSite.prototype.isToplevel = function () {
+ if (this.receiver == null)
+ return true;
+ var className = $Object.prototype.toString.call(this.receiver);
+ return IS_GLOBAL(this.receiver);
+};
+
+CallSite.prototype.isEval = function () {
+ var script = %FunctionGetScript(this.fun);
+ return script && script.compilation_type == 1;
+};
+
+CallSite.prototype.getEvalOrigin = function () {
+ var script = %FunctionGetScript(this.fun);
+ if (!script || script.compilation_type != 1)
+ return null;
+ return new CallSite(null, script.eval_from_function,
+ script.eval_from_position);
+};
+
+CallSite.prototype.getFunctionName = function () {
+ // See if the function knows its own name
+ var name = this.fun.name;
+ if (name)
+ return name;
+ // See if we can find a unique property on the receiver that holds
+ // this function.
+ for (var prop in this.receiver) {
+ if (this.receiver[prop] === this.fun) {
+ // If we find more than one match bail out to avoid confusion
+ if (name)
+ return null;
+ name = prop;
+ }
+ }
+ if (name)
+ return name;
+ // Maybe this is an evaluation?
+ var script = %FunctionGetScript(this.fun);
+ if (script && script.compilation_type == 1)
+ return "eval";
+ return null;
+};
+
+CallSite.prototype.getFileName = function () {
+ var script = %FunctionGetScript(this.fun);
+ return script ? script.name : null;
+};
+
+CallSite.prototype.getLineNumber = function () {
+ if (this.pos == -1)
+ return null;
+ var script = %FunctionGetScript(this.fun);
+ var location = null;
+ if (script) {
+ location = script.locationFromPosition(this.pos, true);
+ }
+ return location ? location.line + 1 : null;
+};
+
+CallSite.prototype.getColumnNumber = function () {
+ if (this.pos == -1)
+ return null;
+ var script = %FunctionGetScript(this.fun);
+ var location = null;
+ if (script) {
+ location = script.locationFromPosition(this.pos, true);
+ }
+ return location ? location.column : null;
+};
+
+CallSite.prototype.isNative = function () {
+ var script = %FunctionGetScript(this.fun);
+ return script ? (script.type == 0) : false;
+};
+
+CallSite.prototype.getPosition = function () {
+ return this.pos;
+};
+
+CallSite.prototype.isConstructor = function () {
+ var constructor = this.receiver ? this.receiver.constructor : null;
+ if (!constructor)
+ return false;
+ return this.fun === constructor;
+};
+
+function FormatSourcePosition(frame) {
+ var fileLocation = "";
+ if (frame.isNative()) {
+ fileLocation = "native";
+ } else if (frame.isEval()) {
+ fileLocation = "eval at " + FormatSourcePosition(frame.getEvalOrigin());
+ } else {
+ var fileName = frame.getFileName();
+ if (fileName) {
+ fileLocation += fileName;
+ var lineNumber = frame.getLineNumber();
+ if (lineNumber != null) {
+ fileLocation += ":" + lineNumber;
+ var columnNumber = frame.getColumnNumber();
+ if (columnNumber) {
+ fileLocation += ":" + columnNumber;
+ }
+ }
+ }
+ }
+ if (!fileLocation) {
+ fileLocation = "unknown source";
+ }
+ var line = "";
+ var functionName = frame.getFunctionName();
+ if (functionName) {
+ if (frame.isToplevel()) {
+ line += functionName;
+ } else if (frame.isConstructor()) {
+ line += "new " + functionName;
+ } else {
+ line += frame.getTypeName() + "." + functionName;
+ }
+ line += " (" + fileLocation + ")";
+ } else {
+ line += fileLocation;
+ }
+ return line;
+}
+
+function FormatStackTrace(error, frames) {
+ var lines = [];
+ try {
+ lines.push(error.toString());
+ } catch (e) {
+ try {
+ lines.push("<error: " + e + ">");
+ } catch (ee) {
+ lines.push("<error>");
+ }
+ }
+ for (var i = 0; i < frames.length; i++) {
+ var frame = frames[i];
+ try {
+ var line = FormatSourcePosition(frame);
+ } catch (e) {
+ try {
+ var line = "<error: " + e + ">";
+ } catch (ee) {
+ // Any code that reaches this point is seriously nasty!
+ var line = "<error>";
+ }
+ }
+ lines.push(" at " + line);
+ }
+ return lines.join("\n");
+}
+
+function FormatRawStackTrace(error, raw_stack) {
+ var frames = [ ];
+ for (var i = 0; i < raw_stack.length; i += 3) {
+ var recv = raw_stack[i];
+ var fun = raw_stack[i+1];
+ var pc = raw_stack[i+2];
+ var pos = %FunctionGetPositionForOffset(fun, pc);
+ frames.push(new CallSite(recv, fun, pos));
+ }
+ if (IS_FUNCTION($Error.prepareStackTrace)) {
+ return $Error.prepareStackTrace(error, frames);
+ } else {
+ return FormatStackTrace(error, frames);
+ }
+}
+
function DefineError(f) {
// Store the error function in both the global object
// and the runtime object. The function is fetched
@@ -659,7 +804,7 @@
%SetProperty(f.prototype, 'constructor', f, DONT_ENUM);
f.prototype.name = name;
%SetCode(f, function(m) {
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
if (m === kAddMessageAccessorsMarker) {
DefineOneShotAccessor(this, 'message', function (obj) {
return FormatMessage({type: obj.type, args: obj.arguments});
@@ -667,6 +812,12 @@
} else if (!IS_UNDEFINED(m)) {
this.message = ToString(m);
}
+ if ($Error.captureStackTraces) {
+ var raw_stack = %CollectStackTrace(f);
+ DefineOneShotAccessor(this, 'stack', function (obj) {
+ return FormatRawStackTrace(obj, raw_stack);
+ });
+ }
} else {
return new f(m);
}
diff --git a/src/mirror-delay.js b/src/mirror-delay.js
index d0e8aa4..76ae75b 100644
--- a/src/mirror-delay.js
+++ b/src/mirror-delay.js
@@ -580,7 +580,7 @@
ObjectMirror.prototype.className = function() {
- return %ClassOf(this.value_);
+ return %_ClassOf(this.value_);
};
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index ba07af7..85b975b 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -152,7 +152,9 @@
case SHARED_FUNCTION_INFO_TYPE:
SharedFunctionInfo::cast(this)->SharedFunctionInfoPrint();
break;
-
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
+ JSGlobalPropertyCell::cast(this)->JSGlobalPropertyCellPrint();
+ break;
#define MAKE_STRUCT_CASE(NAME, Name, name) \
case NAME##_TYPE: \
Name::cast(this)->Name##Print(); \
@@ -214,6 +216,9 @@
case JS_BUILTINS_OBJECT_TYPE:
JSBuiltinsObject::cast(this)->JSBuiltinsObjectVerify();
break;
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
+ JSGlobalPropertyCell::cast(this)->JSGlobalPropertyCellVerify();
+ break;
case JS_ARRAY_TYPE:
JSArray::cast(this)->JSArrayVerify();
break;
@@ -392,6 +397,7 @@
case JS_OBJECT_TYPE: return "JS_OBJECT";
case JS_CONTEXT_EXTENSION_OBJECT_TYPE: return "JS_CONTEXT_EXTENSION_OBJECT";
case ODDBALL_TYPE: return "ODDBALL";
+ case JS_GLOBAL_PROPERTY_CELL_TYPE: return "JS_GLOBAL_PROPERTY_CELL";
case SHARED_FUNCTION_INFO_TYPE: return "SHARED_FUNCTION_INFO";
case JS_FUNCTION_TYPE: return "JS_FUNCTION";
case CODE_TYPE: return "CODE";
@@ -428,6 +434,9 @@
if (is_undetectable()) {
PrintF(" - undetectable\n");
}
+ if (needs_loading()) {
+ PrintF(" - needs_loading\n");
+ }
if (has_instance_call_handler()) {
PrintF(" - instance_call_handler\n");
}
@@ -653,6 +662,17 @@
}
+void JSGlobalPropertyCell::JSGlobalPropertyCellVerify() {
+ CHECK(IsJSGlobalPropertyCell());
+ VerifyObjectField(kValueOffset);
+}
+
+
+void JSGlobalPropertyCell::JSGlobalPropertyCellPrint() {
+ HeapObject::PrintHeader("JSGlobalPropertyCell");
+}
+
+
void Code::CodePrint() {
HeapObject::PrintHeader("Code");
#ifdef ENABLE_DISASSEMBLER
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 8c83715..c360fd7 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -53,6 +53,13 @@
}
+PropertyDetails PropertyDetails::AsDeleted() {
+ PropertyDetails d(DONT_ENUM, NORMAL);
+ Smi* smi = Smi::FromInt(AsSmi()->value() | DeletedField::encode(1));
+ return PropertyDetails(smi);
+}
+
+
#define CAST_ACCESSOR(type) \
type* type::cast(Object* object) { \
ASSERT(object->Is##type()); \
@@ -409,6 +416,13 @@
}
+bool Object::IsJSGlobalPropertyCell() {
+ return Object::IsHeapObject()
+ && HeapObject::cast(this)->map()->instance_type()
+ == JS_GLOBAL_PROPERTY_CELL_TYPE;
+}
+
+
bool Object::IsSharedFunctionInfo() {
return Object::IsHeapObject() &&
(HeapObject::cast(this)->map()->instance_type() ==
@@ -1046,6 +1060,8 @@
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
+ACCESSORS(JSGlobalPropertyCell, value, Object, kValueOffset)
+
int JSObject::GetHeaderSize() {
switch (map()->instance_type()) {
case JS_GLOBAL_PROXY_TYPE:
@@ -1403,6 +1419,7 @@
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(HeapNumber)
CAST_ACCESSOR(Oddball)
+CAST_ACCESSOR(JSGlobalPropertyCell)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(JSFunction)
diff --git a/src/objects.cc b/src/objects.cc
index ad57d17..a967e7b 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -138,7 +138,7 @@
} else if (IsBoolean()) {
holder = global_context->boolean_function()->instance_prototype();
}
- ASSERT(holder != NULL); // cannot handle null or undefined.
+ ASSERT(holder != NULL); // Cannot handle null or undefined.
JSObject::cast(holder)->Lookup(name, result);
}
@@ -399,6 +399,88 @@
}
+Object* JSObject::GetNormalizedProperty(LookupResult* result) {
+ ASSERT(!HasFastProperties());
+ Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
+ if (IsGlobalObject()) {
+ value = JSGlobalPropertyCell::cast(value)->value();
+ }
+ ASSERT(!value->IsJSGlobalPropertyCell());
+ return value;
+}
+
+
+Object* JSObject::SetNormalizedProperty(LookupResult* result, Object* value) {
+ ASSERT(!HasFastProperties());
+ if (IsGlobalObject()) {
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(
+ property_dictionary()->ValueAt(result->GetDictionaryEntry()));
+ cell->set_value(value);
+ } else {
+ property_dictionary()->ValueAtPut(result->GetDictionaryEntry(), value);
+ }
+ return value;
+}
+
+
+Object* JSObject::SetNormalizedProperty(String* name,
+ Object* value,
+ PropertyDetails details) {
+ ASSERT(!HasFastProperties());
+ int entry = property_dictionary()->FindStringEntry(name);
+ if (entry == Dictionary::kNotFound) {
+ Object* store_value = value;
+ if (IsGlobalObject()) {
+ store_value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (store_value->IsFailure()) return store_value;
+ }
+ Object* dict =
+ property_dictionary()->AddStringEntry(name, store_value, details);
+ if (dict->IsFailure()) return dict;
+ set_properties(Dictionary::cast(dict));
+ return value;
+ }
+ // Preserve enumeration index.
+ details = PropertyDetails(details.attributes(),
+ details.type(),
+ property_dictionary()->DetailsAt(entry).index());
+ if (IsGlobalObject()) {
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(property_dictionary()->ValueAt(entry));
+ cell->set_value(value);
+ // Please note we have to update the property details.
+ property_dictionary()->DetailsAtPut(entry, details);
+ } else {
+ property_dictionary()->SetStringEntry(entry, name, value, details);
+ }
+ return value;
+}
+
+
+Object* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) {
+ ASSERT(!HasFastProperties());
+ Dictionary* dictionary = property_dictionary();
+ int entry = dictionary->FindStringEntry(name);
+ if (entry != Dictionary::kNotFound) {
+ // If we have a global object set the cell to the hole.
+ if (IsGlobalObject()) {
+ PropertyDetails details = dictionary->DetailsAt(entry);
+ if (details.IsDontDelete() && mode != FORCE_DELETION) {
+ return Heap::false_value();
+ }
+ JSGlobalPropertyCell* cell =
+ JSGlobalPropertyCell::cast(dictionary->ValueAt(entry));
+ cell->set_value(Heap::the_hole_value());
+ dictionary->DetailsAtPut(entry, details.AsDeleted());
+ } else {
+ return dictionary->DeleteProperty(entry, mode);
+ }
+ }
+ return Heap::true_value();
+}
+
+
Object* Object::GetProperty(Object* receiver,
LookupResult* result,
String* name,
@@ -449,8 +531,7 @@
JSObject* holder = result->holder();
switch (result->type()) {
case NORMAL:
- value =
- holder->property_dictionary()->ValueAt(result->GetDictionaryEntry());
+ value = holder->GetNormalizedProperty(result);
ASSERT(!value->IsTheHole() || result->IsReadOnly());
return value->IsTheHole() ? Heap::undefined_value() : value;
case FIELD:
@@ -949,6 +1030,10 @@
case PROXY_TYPE:
accumulator->Add("<Proxy>");
break;
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
+ accumulator->Add("Cell for ");
+ JSGlobalPropertyCell::cast(this)->value()->ShortPrint(accumulator);
+ break;
default:
accumulator->Add("<Other heap object (%d)>", map()->instance_type());
break;
@@ -1042,6 +1127,10 @@
case CODE_TYPE:
reinterpret_cast<Code*>(this)->CodeIterateBody(v);
break;
+ case JS_GLOBAL_PROPERTY_CELL_TYPE:
+ reinterpret_cast<JSGlobalPropertyCell*>(this)
+ ->JSGlobalPropertyCellIterateBody(v);
+ break;
case HEAP_NUMBER_TYPE:
case FILLER_TYPE:
case BYTE_ARRAY_TYPE:
@@ -1250,12 +1339,27 @@
Object* JSObject::AddSlowProperty(String* name,
Object* value,
PropertyAttributes attributes) {
- PropertyDetails details = PropertyDetails(attributes, NORMAL);
- Object* result = property_dictionary()->AddStringEntry(name, value, details);
- if (result->IsFailure()) return result;
- if (property_dictionary() != result) {
- set_properties(Dictionary::cast(result));
+ ASSERT(!HasFastProperties());
+ Dictionary* dict = property_dictionary();
+ Object* store_value = value;
+ if (IsGlobalObject()) {
+ // In case name is an orphaned property reuse the cell.
+ int entry = dict->FindStringEntry(name);
+ if (entry != Dictionary::kNotFound) {
+ store_value = dict->ValueAt(entry);
+ JSGlobalPropertyCell::cast(store_value)->set_value(value);
+ PropertyDetails details = PropertyDetails(attributes, NORMAL);
+ dict->SetStringEntry(entry, name, store_value, details);
+ return value;
+ }
+ store_value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (store_value->IsFailure()) return store_value;
+ JSGlobalPropertyCell::cast(store_value)->set_value(value);
}
+ PropertyDetails details = PropertyDetails(attributes, NORMAL);
+ Object* result = dict->AddStringEntry(name, store_value, details);
+ if (result->IsFailure()) return result;
+ if (dict != result) set_properties(Dictionary::cast(result));
return value;
}
@@ -1311,13 +1415,7 @@
}
PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
- Object* result =
- dictionary->SetOrAddStringEntry(name, value, new_details);
- if (result->IsFailure()) return result;
- if (dictionary != result) {
- set_properties(Dictionary::cast(result));
- }
- return value;
+ return SetNormalizedProperty(name, value, new_details);
}
Object* JSObject::ConvertDescriptorToFieldAndMapTransition(
@@ -1550,7 +1648,7 @@
if (JSObject::cast(pt)->HasFastElements()) continue;
Dictionary* dictionary = JSObject::cast(pt)->element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) {
+ if (entry != Dictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
@@ -1601,10 +1699,20 @@
}
} else {
int entry = property_dictionary()->FindStringEntry(name);
- if (entry != DescriptorArray::kNotFound) {
+ if (entry != Dictionary::kNotFound) {
// Make sure to disallow caching for uninitialized constants
// found in the dictionary-mode objects.
- if (property_dictionary()->ValueAt(entry)->IsTheHole()) {
+ Object* value = property_dictionary()->ValueAt(entry);
+ if (IsGlobalObject()) {
+ PropertyDetails d = property_dictionary()->DetailsAt(entry);
+ if (d.IsDeleted()) {
+ result->NotFound();
+ return;
+ }
+ value = JSGlobalPropertyCell::cast(value)->value();
+ ASSERT(result->IsLoaded());
+ }
+ if (value->IsTheHole()) {
result->DisallowCaching();
}
result->DictionaryResult(this, entry);
@@ -1736,8 +1844,7 @@
// transition or null descriptor and there are no setters in the prototypes.
switch (result->type()) {
case NORMAL:
- property_dictionary()->ValueAtPut(result->GetDictionaryEntry(), value);
- return value;
+ return SetNormalizedProperty(result, value);
case FIELD:
return FastPropertyAtPut(result->GetFieldIndex(), value);
case MAP_TRANSITION:
@@ -1819,8 +1926,7 @@
// Check of IsReadOnly removed from here in clone.
switch (result->type()) {
case NORMAL:
- property_dictionary()->ValueAtPut(result->GetDictionaryEntry(), value);
- return value;
+ return SetNormalizedProperty(result, value);
case FIELD:
return FastPropertyAtPut(result->GetFieldIndex(), value);
case MAP_TRANSITION:
@@ -2008,6 +2114,10 @@
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = r.GetConstantFunction();
+ if (IsGlobalObject()) {
+ value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (value->IsFailure()) return value;
+ }
Object* result = dictionary->AddStringEntry(r.GetKey(), value, d);
if (result->IsFailure()) return result;
dictionary = Dictionary::cast(result);
@@ -2017,6 +2127,10 @@
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = FastPropertyAt(r.GetFieldIndex());
+ if (IsGlobalObject()) {
+ value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (value->IsFailure()) return value;
+ }
Object* result = dictionary->AddStringEntry(r.GetKey(), value, d);
if (result->IsFailure()) return result;
dictionary = Dictionary::cast(result);
@@ -2026,6 +2140,10 @@
PropertyDetails d =
PropertyDetails(details.attributes(), CALLBACKS, details.index());
Object* value = r.GetCallbacksObject();
+ if (IsGlobalObject()) {
+ value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (value->IsFailure()) return value;
+ }
Object* result = dictionary->AddStringEntry(r.GetKey(), value, d);
if (result->IsFailure()) return result;
dictionary = Dictionary::cast(result);
@@ -2085,8 +2203,9 @@
Object* JSObject::TransformToFastProperties(int unused_property_fields) {
if (HasFastProperties()) return this;
+ ASSERT(!IsGlobalObject());
return property_dictionary()->
- TransformPropertiesToFastFor(this, unused_property_fields);
+ TransformPropertiesToFastFor(this, unused_property_fields);
}
@@ -2139,12 +2258,7 @@
Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
if (obj->IsFailure()) return obj;
- ASSERT(!HasFastProperties());
- // Attempt to remove the property from the property dictionary.
- Dictionary* dictionary = property_dictionary();
- int entry = dictionary->FindStringEntry(name);
- if (entry != -1) return dictionary->DeleteProperty(entry, mode);
- return Heap::true_value();
+ return DeleteNormalizedProperty(name, mode);
}
@@ -2194,7 +2308,9 @@
ASSERT(!HasFastElements());
Dictionary* dictionary = element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) return dictionary->DeleteProperty(entry, mode);
+ if (entry != Dictionary::kNotFound) {
+ return dictionary->DeleteProperty(entry, mode);
+ }
return Heap::true_value();
}
@@ -2266,7 +2382,9 @@
} else {
Dictionary* dictionary = element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) return dictionary->DeleteProperty(entry, mode);
+ if (entry != Dictionary::kNotFound) {
+ return dictionary->DeleteProperty(entry, mode);
+ }
}
return Heap::true_value();
}
@@ -2318,10 +2436,7 @@
Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
if (obj->IsFailure()) return obj;
// Make sure the properties are normalized before removing the entry.
- Dictionary* dictionary = property_dictionary();
- int entry = dictionary->FindStringEntry(name);
- if (entry != -1) return dictionary->DeleteProperty(entry, mode);
- return Heap::true_value();
+ return DeleteNormalizedProperty(name, mode);
}
}
@@ -2574,7 +2689,7 @@
if (!HasFastElements()) {
Dictionary* dictionary = element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) {
+ if (entry != Dictionary::kNotFound) {
Object* result = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.IsReadOnly()) return Heap::undefined_value();
@@ -2623,13 +2738,16 @@
Object* ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
if (ok->IsFailure()) return ok;
- // Update the dictionary with the new CALLBACKS property.
- Object* dict =
- property_dictionary()->SetOrAddStringEntry(name, structure, details);
- if (dict->IsFailure()) return dict;
+ // For the global object allocate a new map to invalidate the global inline
+ // caches which have a global property cell reference directly in the code.
+ if (IsGlobalObject()) {
+ Object* new_map = map()->CopyDropDescriptors();
+ if (new_map->IsFailure()) return new_map;
+ set_map(Map::cast(new_map));
+ }
- // Set the potential new dictionary on the object.
- set_properties(Dictionary::cast(dict));
+ // Update the dictionary with the new CALLBACKS property.
+ return SetNormalizedProperty(name, structure, details);
}
return structure;
@@ -2683,7 +2801,7 @@
if (!jsObject->HasFastElements()) {
Dictionary* dictionary = jsObject->element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) {
+ if (entry != Dictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
@@ -3974,6 +4092,11 @@
}
+void JSGlobalPropertyCell::JSGlobalPropertyCellIterateBody(ObjectVisitor* v) {
+ IteratePointers(v, kValueOffset, kValueOffset + kPointerSize);
+}
+
+
uint16_t ConsString::ConsStringGet(int index) {
ASSERT(index >= 0 && index < this->length());
@@ -4897,8 +5020,30 @@
}
+const char* Code::PropertyType2String(PropertyType type) {
+ switch (type) {
+ case NORMAL: return "NORMAL";
+ case FIELD: return "FIELD";
+ case CONSTANT_FUNCTION: return "CONSTANT_FUNCTION";
+ case CALLBACKS: return "CALLBACKS";
+ case INTERCEPTOR: return "INTERCEPTOR";
+ case MAP_TRANSITION: return "MAP_TRANSITION";
+ case CONSTANT_TRANSITION: return "CONSTANT_TRANSITION";
+ case NULL_DESCRIPTOR: return "NULL_DESCRIPTOR";
+ }
+ UNREACHABLE();
+ return NULL;
+}
+
void Code::Disassemble(const char* name) {
PrintF("kind = %s\n", Kind2String(kind()));
+ if (is_inline_cache_stub()) {
+ PrintF("ic_state = %s\n", ICState2String(ic_state()));
+ PrintF("ic_in_loop = %d\n", ic_in_loop() == IN_LOOP);
+ if (ic_state() == MONOMORPHIC) {
+ PrintF("type = %s\n", PropertyType2String(type()));
+ }
+ }
if ((name != NULL) && (name[0] != '\0')) {
PrintF("name = %s\n", name);
}
@@ -5095,7 +5240,9 @@
return true;
}
} else {
- if (element_dictionary()->FindNumberEntry(index) != -1) return true;
+ if (element_dictionary()->FindNumberEntry(index) != Dictionary::kNotFound) {
+ return true;
+ }
}
// Handle [] on String objects.
@@ -5170,7 +5317,8 @@
return (index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole();
} else {
- return element_dictionary()->FindNumberEntry(index) != -1;
+ return element_dictionary()->FindNumberEntry(index)
+ != Dictionary::kNotFound;
}
}
@@ -5196,7 +5344,9 @@
if ((index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole()) return true;
} else {
- if (element_dictionary()->FindNumberEntry(index) != -1) return true;
+ if (element_dictionary()->FindNumberEntry(index) != Dictionary::kNotFound) {
+ return true;
+ }
}
// Handle [] on String objects.
@@ -5332,7 +5482,7 @@
Dictionary* dictionary = Dictionary::cast(elms);
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) {
+ if (entry != Dictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
@@ -5426,7 +5576,7 @@
} else {
Dictionary* dictionary = element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) {
+ if (entry != Dictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
@@ -5510,7 +5660,7 @@
} else {
Dictionary* dictionary = element_dictionary();
int entry = dictionary->FindNumberEntry(index);
- if (entry != -1) {
+ if (entry != Dictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
@@ -5803,7 +5953,8 @@
return (index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole();
}
- return element_dictionary()->FindNumberEntry(index) != -1;
+ return element_dictionary()->FindNumberEntry(index)
+ != Dictionary::kNotFound;
}
@@ -6328,7 +6479,7 @@
template <int prefix_size, int element_size>
int HashTable<prefix_size, element_size>::FindEntry(HashTableKey* key) {
uint32_t nof = NumberOfElements();
- if (nof == 0) return -1; // Bail out if empty.
+ if (nof == 0) return kNotFound; // Bail out if empty.
uint32_t capacity = Capacity();
uint32_t hash = key->Hash();
@@ -6338,17 +6489,17 @@
uint32_t passed_elements = 0;
if (!element->IsNull()) {
if (!element->IsUndefined() && key->IsMatch(element)) return entry;
- if (++passed_elements == nof) return -1;
+ if (++passed_elements == nof) return kNotFound;
}
for (uint32_t i = 1; !element->IsUndefined(); i++) {
entry = GetProbe(hash, i, capacity);
element = KeyAt(entry);
if (!element->IsNull()) {
if (!element->IsUndefined() && key->IsMatch(element)) return entry;
- if (++passed_elements == nof) return -1;
+ if (++passed_elements == nof) return kNotFound;
}
}
- return -1;
+ return kNotFound;
}
@@ -6588,6 +6739,14 @@
}
+Object* GlobalObject::GetPropertyCell(LookupResult* result) {
+ ASSERT(!HasFastProperties());
+ Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
+ ASSERT(value->IsJSGlobalPropertyCell());
+ return value;
+}
+
+
Object* SymbolTable::LookupString(String* string, Object** s) {
SymbolKey key(string);
return LookupKey(&key, s);
@@ -6597,7 +6756,7 @@
bool SymbolTable::LookupSymbolIfExists(String* string, String** symbol) {
SymbolKey key(string);
int entry = FindEntry(&key);
- if (entry == -1) {
+ if (entry == kNotFound) {
return false;
} else {
String* result = String::cast(KeyAt(entry));
@@ -6618,7 +6777,7 @@
int entry = FindEntry(key);
// Symbol already in table.
- if (entry != -1) {
+ if (entry != kNotFound) {
*s = KeyAt(entry);
return this;
}
@@ -6648,7 +6807,7 @@
Object* CompilationCacheTable::Lookup(String* src) {
StringKey key(src);
int entry = FindEntry(&key);
- if (entry == -1) return Heap::undefined_value();
+ if (entry == kNotFound) return Heap::undefined_value();
return get(EntryToIndex(entry) + 1);
}
@@ -6656,7 +6815,7 @@
Object* CompilationCacheTable::LookupEval(String* src, Context* context) {
StringSharedKey key(src, context->closure()->shared());
int entry = FindEntry(&key);
- if (entry == -1) return Heap::undefined_value();
+ if (entry == kNotFound) return Heap::undefined_value();
return get(EntryToIndex(entry) + 1);
}
@@ -6665,7 +6824,7 @@
JSRegExp::Flags flags) {
RegExpKey key(src, flags);
int entry = FindEntry(&key);
- if (entry == -1) return Heap::undefined_value();
+ if (entry == kNotFound) return Heap::undefined_value();
return get(EntryToIndex(entry) + 1);
}
@@ -6764,7 +6923,7 @@
Object* MapCache::Lookup(FixedArray* array) {
SymbolsKey key(array);
int entry = FindEntry(&key);
- if (entry == -1) return Heap::undefined_value();
+ if (entry == kNotFound) return Heap::undefined_value();
return get(EntryToIndex(entry) + 1);
}
@@ -6915,7 +7074,7 @@
int entry = FindEntry(key);
// If the entry is present set the value;
- if (entry != -1) {
+ if (entry != kNotFound) {
ValueAtPut(entry, value);
return this;
}
@@ -6988,7 +7147,7 @@
Object* value,
PropertyDetails details) {
StringKey k(key);
- SLOW_ASSERT(FindEntry(&k) == -1);
+ SLOW_ASSERT(FindEntry(&k) == kNotFound);
return Add(&k, value, details);
}
@@ -6998,17 +7157,11 @@
PropertyDetails details) {
NumberKey k(key);
UpdateMaxNumberKey(key);
- SLOW_ASSERT(FindEntry(&k) == -1);
+ SLOW_ASSERT(FindEntry(&k) == kNotFound);
return Add(&k, value, details);
}
-Object* Dictionary::AtStringPut(String* key, Object* value) {
- StringKey k(key);
- return AtPut(&k, value);
-}
-
-
Object* Dictionary::AtNumberPut(uint32_t key, Object* value) {
NumberKey k(key);
UpdateMaxNumberKey(key);
@@ -7016,12 +7169,10 @@
}
-Object* Dictionary::SetOrAddStringEntry(String* key,
- Object* value,
- PropertyDetails details) {
- StringKey k(key);
- int entry = FindEntry(&k);
- if (entry == -1) return AddStringEntry(key, value, details);
+Object* Dictionary::SetStringEntry(int entry,
+ String* key,
+ Object* value,
+ PropertyDetails details) {
// Preserve enumeration index.
details = PropertyDetails(details.attributes(),
details.type(),
@@ -7124,8 +7275,12 @@
int capacity = Capacity();
for (int i = 0; i < capacity; i++) {
Object* k = KeyAt(i);
- if (IsKey(k) && ValueAt(i) == value) {
- return k;
+ if (IsKey(k)) {
+ Object* e = ValueAt(i);
+ if (e->IsJSGlobalPropertyCell()) {
+ e = JSGlobalPropertyCell::cast(e)->value();
+ }
+ if (e == value) return k;
}
}
return Heap::undefined_value();
diff --git a/src/objects.h b/src/objects.h
index fd9af38..4ebe6a1 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -153,20 +153,23 @@
int index() { return IndexField::decode(value_); }
+ inline PropertyDetails AsDeleted();
+
static bool IsValidIndex(int index) { return IndexField::is_valid(index); }
bool IsReadOnly() { return (attributes() & READ_ONLY) != 0; }
bool IsDontDelete() { return (attributes() & DONT_DELETE) != 0; }
bool IsDontEnum() { return (attributes() & DONT_ENUM) != 0; }
+ bool IsDeleted() { return DeletedField::decode(value_) != 0;}
// Bit fields in value_ (type, shift, size). Must be public so the
// constants can be embedded in generated code.
class TypeField: public BitField<PropertyType, 0, 3> {};
class AttributesField: public BitField<PropertyAttributes, 3, 3> {};
- class IndexField: public BitField<uint32_t, 6, 32-6> {};
+ class DeletedField: public BitField<uint32_t, 6, 1> {};
+ class IndexField: public BitField<uint32_t, 7, 31-7> {};
static const int kInitialIndex = 1;
-
private:
uint32_t value_;
};
@@ -263,6 +266,7 @@
V(HEAP_NUMBER_TYPE) \
V(FIXED_ARRAY_TYPE) \
V(CODE_TYPE) \
+ V(JS_GLOBAL_PROPERTY_CELL_TYPE) \
V(ODDBALL_TYPE) \
V(PROXY_TYPE) \
V(BYTE_ARRAY_TYPE) \
@@ -547,6 +551,7 @@
FIXED_ARRAY_TYPE,
CODE_TYPE,
ODDBALL_TYPE,
+ JS_GLOBAL_PROPERTY_CELL_TYPE,
PROXY_TYPE,
BYTE_ARRAY_TYPE,
FILLER_TYPE,
@@ -684,6 +689,7 @@
inline bool IsJSGlobalProxy();
inline bool IsUndetectableObject();
inline bool IsAccessCheckNeeded();
+ inline bool IsJSGlobalPropertyCell();
// Returns true if this object is an instance of the specified
// function template.
@@ -1193,6 +1199,8 @@
// caching.
class JSObject: public HeapObject {
public:
+ enum DeleteMode { NORMAL_DELETION, FORCE_DELETION };
+
// [properties]: Backing storage for properties.
// properties is a FixedArray in the fast case, and a Dictionary in the
// slow case.
@@ -1243,6 +1251,23 @@
Object* value,
PropertyAttributes attributes);
+ // Retrieve a value in a normalized object given a lookup result.
+ // Handles the special representation of JS global objects.
+ Object* GetNormalizedProperty(LookupResult* result);
+
+ // Sets the property value in a normalized object given a lookup result.
+ // Handles the special representation of JS global objects.
+ Object* SetNormalizedProperty(LookupResult* result, Object* value);
+
+ // Sets the property value in a normalized object given (key, value, details).
+ // Handles the special representation of JS global objects.
+ Object* SetNormalizedProperty(String* name,
+ Object* value,
+ PropertyDetails details);
+
+ // Deletes the named property in a normalized object.
+ Object* DeleteNormalizedProperty(String* name, DeleteMode mode);
+
// Sets a property that currently has lazy loading.
Object* SetLazyProperty(LookupResult* result,
String* name,
@@ -1293,7 +1318,6 @@
return GetLocalPropertyAttribute(name) != ABSENT;
}
- enum DeleteMode { NORMAL_DELETION, FORCE_DELETION };
Object* DeleteProperty(String* name, DeleteMode mode);
Object* DeleteElement(uint32_t index, DeleteMode mode);
Object* DeleteLazyProperty(LookupResult* result,
@@ -1930,6 +1954,9 @@
static const int kElementsStartOffset =
kHeaderSize + kElementsStartIndex * kPointerSize;
+ // Constant used for denoting an absent entry.
+ static const int kNotFound = -1;
+
protected:
// Find entry for key otherwise return -1.
int FindEntry(HashTableKey* key);
@@ -2027,7 +2054,9 @@
class Dictionary: public DictionaryBase {
public:
// Returns the value at entry.
- Object* ValueAt(int entry) { return get(EntryToIndex(entry)+1); }
+ Object* ValueAt(int entry) {
+ return get(EntryToIndex(entry)+1);
+ }
// Set the value for entry.
void ValueAtPut(int entry, Object* value) {
@@ -2064,16 +2093,16 @@
Object* DeleteProperty(int entry, JSObject::DeleteMode mode);
// Type specific at put (default NONE attributes is used when adding).
- Object* AtStringPut(String* key, Object* value);
Object* AtNumberPut(uint32_t key, Object* value);
Object* AddStringEntry(String* key, Object* value, PropertyDetails details);
Object* AddNumberEntry(uint32_t key, Object* value, PropertyDetails details);
// Set an existing entry or add a new one if needed.
- Object* SetOrAddStringEntry(String* key,
- Object* value,
- PropertyDetails details);
+ Object* SetStringEntry(int entry,
+ String* key,
+ Object* value,
+ PropertyDetails details);
Object* SetOrAddNumberEntry(uint32_t key,
Object* value,
@@ -2252,6 +2281,7 @@
// Printing
static const char* Kind2String(Kind kind);
static const char* ICState2String(InlineCacheState state);
+ static const char* PropertyType2String(PropertyType type);
void Disassemble(const char* name);
#endif // ENABLE_DISASSEMBLER
@@ -2274,7 +2304,7 @@
// [flags]: Access to specific code flags.
inline Kind kind();
inline InlineCacheState ic_state(); // Only valid for IC stubs.
- inline InLoopFlag ic_in_loop(); // Only valid for IC stubs..
+ inline InLoopFlag ic_in_loop(); // Only valid for IC stubs.
inline PropertyType type(); // Only valid for monomorphic IC stubs.
inline int arguments_count(); // Only valid for call IC stubs.
@@ -2655,16 +2685,16 @@
public:
// Script types.
enum Type {
- TYPE_NATIVE,
- TYPE_EXTENSION,
- TYPE_NORMAL
+ TYPE_NATIVE = 0,
+ TYPE_EXTENSION = 1,
+ TYPE_NORMAL = 2
};
// Script compilation types.
enum CompilationType {
- COMPILATION_TYPE_HOST,
- COMPILATION_TYPE_EVAL,
- COMPILATION_TYPE_JSON
+ COMPILATION_TYPE_HOST = 0,
+ COMPILATION_TYPE_EVAL = 1,
+ COMPILATION_TYPE_JSON = 2
};
// [source]: the script source.
@@ -3029,6 +3059,9 @@
// [global receiver]: the global receiver object of the context
DECL_ACCESSORS(global_receiver, JSObject)
+ // Retrieve the property cell used to store a property.
+ Object* GetPropertyCell(LookupResult* result);
+
// Casting.
static inline GlobalObject* cast(Object* obj);
@@ -3048,6 +3081,7 @@
// JavaScript global object.
class JSGlobalObject: public GlobalObject {
public:
+
// Casting.
static inline JSGlobalObject* cast(Object* obj);
@@ -3936,6 +3970,31 @@
};
+class JSGlobalPropertyCell: public HeapObject {
+ public:
+ // [value]: value of the global property.
+ DECL_ACCESSORS(value, Object)
+
+ // Casting.
+ static inline JSGlobalPropertyCell* cast(Object* obj);
+
+ // Dispatched behavior.
+ void JSGlobalPropertyCellIterateBody(ObjectVisitor* v);
+#ifdef DEBUG
+ void JSGlobalPropertyCellVerify();
+ void JSGlobalPropertyCellPrint();
+#endif
+
+ // Layout description.
+ static const int kValueOffset = HeapObject::kHeaderSize;
+ static const int kSize = kValueOffset + kPointerSize;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSGlobalPropertyCell);
+};
+
+
+
// Proxy describes objects pointing from JavaScript to C structures.
// Since they cannot contain references to JS HeapObjects they can be
// placed in old_data_space.
diff --git a/src/platform-freebsd.cc b/src/platform-freebsd.cc
index acef74c..92d72f8 100644
--- a/src/platform-freebsd.cc
+++ b/src/platform-freebsd.cc
@@ -561,6 +561,7 @@
sample.sp = mcontext.mc_esp;
sample.fp = mcontext.mc_ebp;
#endif
+ active_sampler_->SampleStack(&sample);
}
// We always sample the VM state.
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index 39495ab..bccf9e6 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -639,6 +639,7 @@
sample.fp = mcontext.arm_fp;
#endif
#endif
+ active_sampler_->SampleStack(&sample);
}
// We always sample the VM state.
diff --git a/src/platform-macos.cc b/src/platform-macos.cc
index 5a0eae2..880931e 100644
--- a/src/platform-macos.cc
+++ b/src/platform-macos.cc
@@ -38,6 +38,7 @@
#include <pthread.h>
#include <semaphore.h>
#include <signal.h>
+#include <mach/mach.h>
#include <mach/semaphore.h>
#include <mach/task.h>
#include <sys/time.h>
@@ -475,63 +476,94 @@
#ifdef ENABLE_LOGGING_AND_PROFILING
-static Sampler* active_sampler_ = NULL;
+class Sampler::PlatformData : public Malloced {
+ public:
+ explicit PlatformData(Sampler* sampler)
+ : sampler_(sampler),
+ task_self_(mach_task_self()),
+ profiled_thread_(0),
+ sampler_thread_(0) {
+ }
-static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
- USE(info);
- if (signal != SIGPROF) return;
- if (active_sampler_ == NULL) return;
+ Sampler* sampler_;
+ // Note: for profiled_thread_ Mach primitives are used instead of PThread's
+ // because the latter doesn't provide the thread manipulation primitives required.
+ // For details, consult "Mac OS X Internals" book, Section 7.3.
+ mach_port_t task_self_;
+ thread_act_t profiled_thread_;
+ pthread_t sampler_thread_;
- TickSample sample;
+ // Sampler thread handler.
+ void Runner() {
+ // Loop until the sampler is disengaged.
+ while (sampler_->IsActive()) {
+ TickSample sample;
- // If profiling, we extract the current pc and sp.
- if (active_sampler_->IsProfiling()) {
- // Extracting the sample from the context is extremely machine dependent.
- ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
- mcontext_t& mcontext = ucontext->uc_mcontext;
+ // If profiling, we record the pc and sp of the profiled thread.
+ if (sampler_->IsProfiling()
+ && KERN_SUCCESS == thread_suspend(profiled_thread_)) {
#if V8_HOST_ARCH_X64
- UNIMPLEMENTED();
- USE(mcontext);
- sample.pc = 0;
- sample.sp = 0;
- sample.fp = 0;
+ thread_state_flavor_t flavor = x86_THREAD_STATE64;
+ x86_thread_state64_t state;
+ mach_msg_type_number_t count = x86_THREAD_STATE64_COUNT;
#elif V8_HOST_ARCH_IA32
-#if __DARWIN_UNIX03
- sample.pc = mcontext->__ss.__eip;
- sample.sp = mcontext->__ss.__esp;
- sample.fp = mcontext->__ss.__ebp;
-#else // !__DARWIN_UNIX03
- sample.pc = mcontext->ss.eip;
- sample.sp = mcontext->ss.esp;
- sample.fp = mcontext->ss.ebp;
-#endif // __DARWIN_UNIX03
+ thread_state_flavor_t flavor = i386_THREAD_STATE;
+ i386_thread_state_t state;
+ mach_msg_type_number_t count = i386_THREAD_STATE_COUNT;
#else
#error Unsupported Mac OS X host architecture.
#endif // V8_TARGET_ARCH_IA32
+ if (thread_get_state(profiled_thread_,
+ flavor,
+ reinterpret_cast<natural_t*>(&state),
+ &count) == KERN_SUCCESS) {
+#if V8_HOST_ARCH_X64
+ UNIMPLEMENTED();
+ sample.pc = 0;
+ sample.sp = 0;
+ sample.fp = 0;
+#elif V8_HOST_ARCH_IA32
+#if __DARWIN_UNIX03
+ sample.pc = state.__eip;
+ sample.sp = state.__esp;
+ sample.fp = state.__ebp;
+#else // !__DARWIN_UNIX03
+ sample.pc = state.eip;
+ sample.sp = state.esp;
+ sample.fp = state.ebp;
+#endif // __DARWIN_UNIX03
+#else
+#error Unsupported Mac OS X host architecture.
+#endif // V8_HOST_ARCH_IA32
+ sampler_->SampleStack(&sample);
+ }
+ thread_resume(profiled_thread_);
+ }
+
+ // We always sample the VM state.
+ sample.state = Logger::state();
+ // Invoke tick handler with program counter and stack pointer.
+ sampler_->Tick(&sample);
+
+ // Wait until next sampling.
+ usleep(sampler_->interval_ * 1000);
+ }
}
-
- // We always sample the VM state.
- sample.state = Logger::state();
-
- active_sampler_->Tick(&sample);
-}
-
-
-class Sampler::PlatformData : public Malloced {
- public:
- PlatformData() {
- signal_handler_installed_ = false;
- }
-
- bool signal_handler_installed_;
- struct sigaction old_signal_handler_;
- struct itimerval old_timer_value_;
};
+// Entry point for sampler thread.
+static void* SamplerEntry(void* arg) {
+ Sampler::PlatformData* data =
+ reinterpret_cast<Sampler::PlatformData*>(arg);
+ data->Runner();
+ return 0;
+}
+
+
Sampler::Sampler(int interval, bool profiling)
: interval_(interval), profiling_(profiling), active_(false) {
- data_ = new PlatformData();
+ data_ = new PlatformData(this);
}
@@ -541,43 +573,40 @@
void Sampler::Start() {
- // There can only be one active sampler at the time on POSIX
- // platforms.
- if (active_sampler_ != NULL) return;
+ // If we are profiling, we need to be able to access the calling
+ // thread.
+ if (IsProfiling()) {
+ data_->profiled_thread_ = mach_thread_self();
+ }
- // Request profiling signals.
- struct sigaction sa;
- sa.sa_sigaction = ProfilerSignalHandler;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_SIGINFO;
- if (sigaction(SIGPROF, &sa, &data_->old_signal_handler_) != 0) return;
- data_->signal_handler_installed_ = true;
+ // Create sampler thread with high priority.
+ // According to POSIX spec, when SCHED_FIFO policy is used, a thread
+ // runs until it exits or blocks.
+ pthread_attr_t sched_attr;
+ sched_param fifo_param;
+ pthread_attr_init(&sched_attr);
+ pthread_attr_setinheritsched(&sched_attr, PTHREAD_EXPLICIT_SCHED);
+ pthread_attr_setschedpolicy(&sched_attr, SCHED_FIFO);
+ fifo_param.sched_priority = sched_get_priority_max(SCHED_FIFO);
+ pthread_attr_setschedparam(&sched_attr, &fifo_param);
- // Set the itimer to generate a tick for each interval.
- itimerval itimer;
- itimer.it_interval.tv_sec = interval_ / 1000;
- itimer.it_interval.tv_usec = (interval_ % 1000) * 1000;
- itimer.it_value.tv_sec = itimer.it_interval.tv_sec;
- itimer.it_value.tv_usec = itimer.it_interval.tv_usec;
- setitimer(ITIMER_PROF, &itimer, &data_->old_timer_value_);
-
- // Set this sampler as the active sampler.
- active_sampler_ = this;
active_ = true;
+ pthread_create(&data_->sampler_thread_, &sched_attr, SamplerEntry, data_);
}
void Sampler::Stop() {
- // Restore old signal handler
- if (data_->signal_handler_installed_) {
- setitimer(ITIMER_PROF, &data_->old_timer_value_, NULL);
- sigaction(SIGPROF, &data_->old_signal_handler_, 0);
- data_->signal_handler_installed_ = false;
- }
-
- // This sampler is no longer the active sampler.
- active_sampler_ = NULL;
+ // Setting active to false triggers termination of the sampler
+ // thread.
active_ = false;
+
+ // Wait for sampler thread to terminate.
+ pthread_join(data_->sampler_thread_, NULL);
+
+ // Deallocate Mach port for thread.
+ if (IsProfiling()) {
+ mach_port_deallocate(data_->task_self_, data_->profiled_thread_);
+ }
}
#endif // ENABLE_LOGGING_AND_PROFILING
diff --git a/src/platform-win32.cc b/src/platform-win32.cc
index 1b0f9b2..a8a6243 100644
--- a/src/platform-win32.cc
+++ b/src/platform-win32.cc
@@ -1776,32 +1776,30 @@
TickSample sample;
// If profiling, we record the pc and sp of the profiled thread.
- if (sampler_->IsProfiling()) {
- // Pause the profiled thread and get its context.
- SuspendThread(profiled_thread_);
+ if (sampler_->IsProfiling()
+ && SuspendThread(profiled_thread_) != (DWORD)-1) {
context.ContextFlags = CONTEXT_FULL;
- GetThreadContext(profiled_thread_, &context);
- // Invoke tick handler with program counter and stack pointer.
+ if (GetThreadContext(profiled_thread_, &context) != 0) {
#if V8_HOST_ARCH_X64
- UNIMPLEMENTED();
- sample.pc = context.Rip;
- sample.sp = context.Rsp;
- sample.fp = context.Rbp;
+ UNIMPLEMENTED();
+ sample.pc = context.Rip;
+ sample.sp = context.Rsp;
+ sample.fp = context.Rbp;
#else
- sample.pc = context.Eip;
- sample.sp = context.Esp;
- sample.fp = context.Ebp;
+ sample.pc = context.Eip;
+ sample.sp = context.Esp;
+ sample.fp = context.Ebp;
#endif
+ sampler_->SampleStack(&sample);
+ }
+ ResumeThread(profiled_thread_);
}
// We always sample the VM state.
sample.state = Logger::state();
+ // Invoke tick handler with program counter and stack pointer.
sampler_->Tick(&sample);
- if (sampler_->IsProfiling()) {
- ResumeThread(profiled_thread_);
- }
-
// Wait until next sampling.
Sleep(sampler_->interval_);
}
diff --git a/src/platform.h b/src/platform.h
index b5123c5..11a1e79 100644
--- a/src/platform.h
+++ b/src/platform.h
@@ -510,6 +510,9 @@
explicit Sampler(int interval, bool profiling);
virtual ~Sampler();
+ // Performs stack sampling.
+ virtual void SampleStack(TickSample* sample) = 0;
+
// This method is called for each sampling period with the current
// program counter.
virtual void Tick(TickSample* sample) = 0;
@@ -527,8 +530,8 @@
class PlatformData;
private:
- int interval_;
- bool profiling_;
+ const int interval_;
+ const bool profiling_;
bool active_;
PlatformData* data_; // Platform specific data.
DISALLOW_IMPLICIT_CONSTRUCTORS(Sampler);
diff --git a/src/property.h b/src/property.h
index edab97a..69e5640 100644
--- a/src/property.h
+++ b/src/property.h
@@ -230,6 +230,7 @@
bool IsReadOnly() { return details_.IsReadOnly(); }
bool IsDontDelete() { return details_.IsDontDelete(); }
bool IsDontEnum() { return details_.IsDontEnum(); }
+ bool IsDeleted() { return details_.IsDeleted(); }
bool IsValid() { return lookup_type_ != NOT_FOUND; }
bool IsNotFound() { return lookup_type_ == NOT_FOUND; }
@@ -256,8 +257,14 @@
switch (type()) {
case FIELD:
return holder()->FastPropertyAt(GetFieldIndex());
- case NORMAL:
- return holder()->property_dictionary()->ValueAt(GetDictionaryEntry());
+ case NORMAL: {
+ Object* value;
+ value = holder()->property_dictionary()->ValueAt(GetDictionaryEntry());
+ if (holder()->IsGlobalObject()) {
+ value = JSGlobalPropertyCell::cast(value)->value();
+ }
+ return value;
+ }
case CONSTANT_FUNCTION:
return GetConstantFunction();
default:
@@ -306,7 +313,7 @@
}
// In the dictionary case, the data is held in the value field.
ASSERT(lookup_type_ == DICTIONARY_TYPE);
- return holder()->property_dictionary()->ValueAt(GetDictionaryEntry());
+ return holder()->GetNormalizedProperty(this);
}
private:
diff --git a/src/regexp-delay.js b/src/regexp-delay.js
index 8491863..14c3644 100644
--- a/src/regexp-delay.js
+++ b/src/regexp-delay.js
@@ -103,7 +103,7 @@
function RegExpConstructor(pattern, flags) {
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
DoConstructRegExp(this, pattern, flags, true);
} else {
// RegExp : Called as function; see ECMA-262, section 15.10.3.1.
diff --git a/src/runtime.cc b/src/runtime.cc
index dcff28b..e0eab74 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -413,48 +413,6 @@
}
-static Object* Runtime_HasStringClass(Arguments args) {
- return Heap::ToBoolean(args[0]->HasSpecificClassOf(Heap::String_symbol()));
-}
-
-
-static Object* Runtime_HasDateClass(Arguments args) {
- return Heap::ToBoolean(args[0]->HasSpecificClassOf(Heap::Date_symbol()));
-}
-
-
-static Object* Runtime_HasArrayClass(Arguments args) {
- return Heap::ToBoolean(args[0]->HasSpecificClassOf(Heap::Array_symbol()));
-}
-
-
-static Object* Runtime_HasFunctionClass(Arguments args) {
- return Heap::ToBoolean(
- args[0]->HasSpecificClassOf(Heap::function_class_symbol()));
-}
-
-
-static Object* Runtime_HasNumberClass(Arguments args) {
- return Heap::ToBoolean(args[0]->HasSpecificClassOf(Heap::Number_symbol()));
-}
-
-
-static Object* Runtime_HasBooleanClass(Arguments args) {
- return Heap::ToBoolean(args[0]->HasSpecificClassOf(Heap::Boolean_symbol()));
-}
-
-
-static Object* Runtime_HasArgumentsClass(Arguments args) {
- return Heap::ToBoolean(
- args[0]->HasSpecificClassOf(Heap::Arguments_symbol()));
-}
-
-
-static Object* Runtime_HasRegExpClass(Arguments args) {
- return Heap::ToBoolean(args[0]->HasSpecificClassOf(Heap::RegExp_symbol()));
-}
-
-
static Object* Runtime_IsInPrototypeChain(Arguments args) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -618,9 +576,6 @@
// property as read-only, so we don't either.
PropertyAttributes base = is_eval ? NONE : DONT_DELETE;
- // Only optimize the object if we intend to add more than 5 properties.
- OptimizedObjectForAddingMultipleProperties ba(global, pairs->length()/2 > 5);
-
// Traverse the name/value pairs and set the properties.
int length = pairs->length();
for (int i = 0; i < length; i += 2) {
@@ -814,17 +769,23 @@
PropertyAttributes attributes = DONT_DELETE;
// Lookup the property locally in the global object. If it isn't
- // there, we add the property and take special precautions to always
- // add it as a local property even in case of callbacks in the
- // prototype chain (this rules out using SetProperty).
- // We have IgnoreAttributesAndSetLocalProperty for this.
+ // there, there is a property with this name in the prototype chain.
+ // We follow Safari and Firefox behavior and only set the property
+ // locally if there is an explicit initialization value that we have
+ // to assign to the property. When adding the property we take
+ // special precautions to always add it as a local property even in
+ // case of callbacks in the prototype chain (this rules out using
+ // SetProperty). We have IgnoreAttributesAndSetLocalProperty for
+ // this.
LookupResult lookup;
global->LocalLookup(*name, &lookup);
if (!lookup.IsProperty()) {
- Object* value = (assign) ? args[1] : Heap::undefined_value();
- return global->IgnoreAttributesAndSetLocalProperty(*name,
- value,
- attributes);
+ if (assign) {
+ return global->IgnoreAttributesAndSetLocalProperty(*name,
+ args[1],
+ attributes);
+ }
+ return Heap::undefined_value();
}
// Determine if this is a redeclaration of something read-only.
@@ -932,10 +893,8 @@
properties->set(index, *value);
}
} else if (type == NORMAL) {
- Dictionary* dictionary = global->property_dictionary();
- int entry = lookup.GetDictionaryEntry();
- if (dictionary->ValueAt(entry)->IsTheHole()) {
- dictionary->ValueAtPut(entry, *value);
+ if (global->GetNormalizedProperty(&lookup)->IsTheHole()) {
+ global->SetNormalizedProperty(&lookup, *value);
}
} else {
// Ignore re-initialization of constants that have already been
@@ -1025,10 +984,8 @@
properties->set(index, *value);
}
} else if (type == NORMAL) {
- Dictionary* dictionary = context_ext->property_dictionary();
- int entry = lookup.GetDictionaryEntry();
- if (dictionary->ValueAt(entry)->IsTheHole()) {
- dictionary->ValueAtPut(entry, *value);
+ if (context_ext->GetNormalizedProperty(&lookup)->IsTheHole()) {
+ context_ext->SetNormalizedProperty(&lookup, *value);
}
} else {
// We should not reach here. Any real, named property should be
@@ -1156,6 +1113,21 @@
}
+static Object* Runtime_FunctionGetPositionForOffset(Arguments args) {
+ ASSERT(args.length() == 2);
+
+ CONVERT_CHECKED(JSFunction, fun, args[0]);
+ CONVERT_NUMBER_CHECKED(int, offset, Int32, args[1]);
+
+ Code* code = fun->code();
+ RUNTIME_ASSERT(0 <= offset && offset < code->Size());
+
+ Address pc = code->address() + offset;
+ return Smi::FromInt(fun->code()->SourcePosition(pc));
+}
+
+
+
static Object* Runtime_FunctionSetInstanceClassName(Arguments args) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -2640,9 +2612,13 @@
// Attempt dictionary lookup.
Dictionary* dictionary = receiver->property_dictionary();
int entry = dictionary->FindStringEntry(key);
- if ((entry != DescriptorArray::kNotFound) &&
+ if ((entry != Dictionary::kNotFound) &&
(dictionary->DetailsAt(entry).type() == NORMAL)) {
- return dictionary->ValueAt(entry);
+ Object* value = dictionary->ValueAt(entry);
+ if (receiver->IsGlobalObject()) {
+ value = JSGlobalPropertyCell::cast(value)->value();
+ }
+ return value;
}
}
}
@@ -4522,17 +4498,25 @@
// compiler to do the right thing.
//
// TODO(1236026): This is a non-portable hack that should be removed.
-// TODO(x64): Definitely!
+#ifdef V8_HOST_ARCH_64_BIT
+// Tested with GCC, not with MSVC.
+struct ObjectPair {
+ Object* x;
+ Object* y;
+};
+static inline ObjectPair MakePair(Object* x, Object* y) {
+ ObjectPair result = {x, y};
+ return result; // Pointers x and y returned in rax and rdx, in AMD-x64-abi.
+}
+#else
typedef uint64_t ObjectPair;
static inline ObjectPair MakePair(Object* x, Object* y) {
-#if V8_HOST_ARCH_64_BIT
- UNIMPLEMENTED();
- return 0;
-#else
return reinterpret_cast<uint32_t>(x) |
(reinterpret_cast<ObjectPair>(y) << 32);
-#endif
}
+#endif
+
+
static inline Object* Unhole(Object* x, PropertyAttributes attributes) {
@@ -5560,15 +5544,12 @@
bool* caught_exception) {
Object* value;
switch (result->type()) {
- case NORMAL: {
- Dictionary* dict =
- JSObject::cast(result->holder())->property_dictionary();
- value = dict->ValueAt(result->GetDictionaryEntry());
+ case NORMAL:
+ value = result->holder()->GetNormalizedProperty(result);
if (value->IsTheHole()) {
return Heap::undefined_value();
}
return value;
- }
case FIELD:
value =
JSObject::cast(
@@ -7408,6 +7389,67 @@
}
+// Determines whether the given stack frame should be displayed in
+// a stack trace. The caller is the error constructor that asked
+// for the stack trace to be collected. The first time a construct
+// call to this function is encountered it is skipped. The seen_caller
+// in/out parameter is used to remember if the caller has been seen
+// yet.
+static bool ShowFrameInStackTrace(StackFrame* raw_frame, Object* caller,
+ bool* seen_caller) {
+ // Only display JS frames.
+ if (!raw_frame->is_java_script())
+ return false;
+ JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
+ Object* raw_fun = frame->function();
+ // Not sure when this can happen but skip it just in case.
+ if (!raw_fun->IsJSFunction())
+ return false;
+ if ((raw_fun == caller) && !(*seen_caller) && frame->IsConstructor()) {
+ *seen_caller = true;
+ return false;
+ }
+ // Skip the most obvious builtin calls. Some builtin calls (such as
+ // Number.ADD which is invoked using 'call') are very difficult to
+ // recognize so we're leaving them in for now.
+ return !frame->receiver()->IsJSBuiltinsObject();
+}
+
+
+// Collect the raw data for a stack trace. Returns an array of three
+// element segments each containing a receiver, function and native
+// code offset.
+static Object* Runtime_CollectStackTrace(Arguments args) {
+ ASSERT_EQ(args.length(), 1);
+ Object* caller = args[0];
+
+ StackFrameIterator iter;
+ int frame_count = 0;
+ bool seen_caller = false;
+ while (!iter.done()) {
+ if (ShowFrameInStackTrace(iter.frame(), caller, &seen_caller))
+ frame_count++;
+ iter.Advance();
+ }
+ HandleScope scope;
+ Handle<JSArray> result = Factory::NewJSArray(frame_count * 3);
+ int i = 0;
+ seen_caller = false;
+ for (iter.Reset(); !iter.done(); iter.Advance()) {
+ StackFrame* raw_frame = iter.frame();
+ if (ShowFrameInStackTrace(raw_frame, caller, &seen_caller)) {
+ JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
+ result->SetElement(i++, frame->receiver());
+ result->SetElement(i++, frame->function());
+ Address pc = frame->pc();
+ Address start = frame->code()->address();
+ result->SetElement(i++, Smi::FromInt(pc - start));
+ }
+ }
+ return *result;
+}
+
+
static Object* Runtime_Abort(Arguments args) {
ASSERT(args.length() == 2);
OS::PrintError("abort: %s\n", reinterpret_cast<char*>(args[0]) +
diff --git a/src/runtime.h b/src/runtime.h
index 15dd9b4..36e274a 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -169,18 +169,12 @@
F(FunctionGetSourceCode, 1) \
F(FunctionGetScript, 1) \
F(FunctionGetScriptSourcePosition, 1) \
+ F(FunctionGetPositionForOffset, 2) \
F(FunctionIsAPIFunction, 1) \
F(GetScript, 1) \
+ F(CollectStackTrace, 1) \
\
F(ClassOf, 1) \
- F(HasDateClass, 1) \
- F(HasStringClass, 1) \
- F(HasArrayClass, 1) \
- F(HasFunctionClass, 1) \
- F(HasNumberClass, 1) \
- F(HasBooleanClass, 1) \
- F(HasArgumentsClass, 1) \
- F(HasRegExpClass, 1) \
F(SetCode, 2) \
\
F(CreateApiFunction, 1) \
diff --git a/src/runtime.js b/src/runtime.js
index df26b88..25cc5ba 100644
--- a/src/runtime.js
+++ b/src/runtime.js
@@ -394,7 +394,7 @@
// First check whether length is a positive Smi and args is an
// array. This is the fast case. If this fails, we do the slow case
// that takes care of more eventualities.
- if (%_IsArray(args)) {
+ if (IS_ARRAY(args)) {
length = args.length;
if (%_IsSmi(length) && length >= 0 && length < 0x800000 && IS_FUNCTION(this)) {
return length;
@@ -415,9 +415,7 @@
}
// Make sure the arguments list has the right type.
- if (args != null &&
- !%HasArrayClass(args) &&
- !%HasArgumentsClass(args)) {
+ if (args != null && !IS_ARRAY(args) && !IS_ARGUMENTS(args)) {
throw %MakeTypeError('apply_wrong_args', []);
}
diff --git a/src/serialize.cc b/src/serialize.cc
index f45d65d..7e38494 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -699,6 +699,7 @@
UNCLASSIFIED,
10,
"Debug::step_in_fp_addr()");
+#endif
Add(ExternalReference::double_fp_operation(Token::ADD).address(),
UNCLASSIFIED,
11,
@@ -711,7 +712,10 @@
UNCLASSIFIED,
13,
"mul_two_doubles");
-#endif
+ Add(ExternalReference::compare_doubles().address(),
+ UNCLASSIFIED,
+ 14,
+ "compare_doubles");
}
diff --git a/src/spaces.cc b/src/spaces.cc
index 72b028c..077bcab 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -1265,7 +1265,7 @@
// If the block is too small (eg, one or two words), to hold both a size
// field and a next pointer, we give it a filler map that gives it the
// correct size.
- if (size_in_bytes > Array::kHeaderSize) {
+ if (size_in_bytes > ByteArray::kHeaderSize) {
set_map(Heap::byte_array_map());
ByteArray::cast(this)->set_length(ByteArray::LengthFor(size_in_bytes));
} else if (size_in_bytes == kPointerSize) {
diff --git a/src/spaces.h b/src/spaces.h
index 0538c5f..8ce807f 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -1270,7 +1270,7 @@
inline void set_next(Address next);
private:
- static const int kNextOffset = Array::kHeaderSize;
+ static const int kNextOffset = POINTER_SIZE_ALIGN(ByteArray::kHeaderSize);
DISALLOW_IMPLICIT_CONSTRUCTORS(FreeListNode);
};
@@ -1304,7 +1304,8 @@
private:
// The size range of blocks, in bytes. (Smaller allocations are allowed, but
// will always result in waste.)
- static const int kMinBlockSize = Array::kHeaderSize + kPointerSize;
+ static const int kMinBlockSize =
+ POINTER_SIZE_ALIGN(ByteArray::kHeaderSize) + kPointerSize;
static const int kMaxBlockSize = Page::kMaxHeapObjectSize;
// The identity of the owning space, for building allocation Failure
diff --git a/src/string.js b/src/string.js
index 3d8a11b..6164eb8 100644
--- a/src/string.js
+++ b/src/string.js
@@ -35,7 +35,7 @@
// Set the String function and constructor.
%SetCode($String, function(x) {
var value = %_ArgumentsLength() == 0 ? '' : ToString(x);
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
%_SetValueOf(this, value);
} else {
return value;
@@ -46,7 +46,7 @@
// ECMA-262 section 15.5.4.2
function StringToString() {
- if (!IS_STRING(this) && !%HasStringClass(this))
+ if (!IS_STRING(this) && !IS_STRING_WRAPPER(this))
throw new $TypeError('String.prototype.toString is not generic');
return %_ValueOf(this);
}
@@ -54,7 +54,7 @@
// ECMA-262 section 15.5.4.3
function StringValueOf() {
- if (!IS_STRING(this) && !%HasStringClass(this))
+ if (!IS_STRING(this) && !IS_STRING_WRAPPER(this))
throw new $TypeError('String.prototype.valueOf is not generic');
return %_ValueOf(this);
}
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 0c80378..f7ba9f3 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -172,6 +172,24 @@
}
+Object* StubCache::ComputeLoadGlobal(String* name,
+ GlobalObject* receiver,
+ JSGlobalPropertyCell* cell,
+ bool is_dont_delete) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
+ Object* code = receiver->map()->FindInCodeCache(name, flags);
+ if (code->IsUndefined()) {
+ LoadStubCompiler compiler;
+ code = compiler.CompileLoadGlobal(receiver, cell, name, is_dont_delete);
+ if (code->IsFailure()) return code;
+ LOG(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+ Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
+ if (result->IsFailure()) return code;
+ }
+ return Set(name, receiver->map(), Code::cast(code));
+}
+
+
Object* StubCache::ComputeKeyedLoadField(String* name,
JSObject* receiver,
JSObject* holder,
@@ -317,6 +335,23 @@
}
+Object* StubCache::ComputeStoreGlobal(String* name,
+ GlobalObject* receiver,
+ JSGlobalPropertyCell* cell) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL);
+ Object* code = receiver->map()->FindInCodeCache(name, flags);
+ if (code->IsUndefined()) {
+ StoreStubCompiler compiler;
+ code = compiler.CompileStoreGlobal(receiver, cell, name);
+ if (code->IsFailure()) return code;
+ LOG(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
+ Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
+ if (result->IsFailure()) return code;
+ }
+ return Set(name, receiver->map(), Code::cast(code));
+}
+
+
Object* StubCache::ComputeStoreCallback(String* name,
JSObject* receiver,
AccessorInfo* callback) {
@@ -409,9 +444,10 @@
// caches.
if (!function->is_compiled()) return Failure::InternalError();
// Compile the stub - only create stubs for fully compiled functions.
- CallStubCompiler compiler(argc);
- code = compiler.CompileCallConstant(object, holder, function, check, flags);
+ CallStubCompiler compiler(argc, in_loop);
+ code = compiler.CompileCallConstant(object, holder, function, check);
if (code->IsFailure()) return code;
+ ASSERT_EQ(flags, Code::cast(code)->flags());
LOG(CodeCreateEvent(Logger::CALL_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
@@ -442,9 +478,10 @@
argc);
Object* code = map->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- CallStubCompiler compiler(argc);
- code = compiler.CompileCallField(object, holder, index, name, flags);
+ CallStubCompiler compiler(argc, in_loop);
+ code = compiler.CompileCallField(object, holder, index, name);
if (code->IsFailure()) return code;
+ ASSERT_EQ(flags, Code::cast(code)->flags());
LOG(CodeCreateEvent(Logger::CALL_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
@@ -475,9 +512,10 @@
argc);
Object* code = map->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
- CallStubCompiler compiler(argc);
+ CallStubCompiler compiler(argc, NOT_IN_LOOP);
code = compiler.CompileCallInterceptor(object, holder, name);
if (code->IsFailure()) return code;
+ ASSERT_EQ(flags, Code::cast(code)->flags());
LOG(CodeCreateEvent(Logger::CALL_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
@@ -496,6 +534,33 @@
}
+Object* StubCache::ComputeCallGlobal(int argc,
+ InLoopFlag in_loop,
+ String* name,
+ GlobalObject* receiver,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function) {
+ Code::Flags flags =
+ Code::ComputeMonomorphicFlags(Code::CALL_IC, NORMAL, in_loop, argc);
+ Object* code = receiver->map()->FindInCodeCache(name, flags);
+ if (code->IsUndefined()) {
+ // If the function hasn't been compiled yet, we cannot do it now
+ // because it may cause GC. To avoid this issue, we return an
+ // internal error which will make sure we do not update any
+ // caches.
+ if (!function->is_compiled()) return Failure::InternalError();
+ CallStubCompiler compiler(argc, in_loop);
+ code = compiler.CompileCallGlobal(receiver, cell, function, name);
+ if (code->IsFailure()) return code;
+ ASSERT_EQ(flags, Code::cast(code)->flags());
+ LOG(CodeCreateEvent(Logger::CALL_IC_TAG, Code::cast(code), name));
+ Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
+ if (result->IsFailure()) return code;
+ }
+ return Set(name, receiver->map(), Code::cast(code));
+}
+
+
static Object* GetProbeValue(Code::Flags flags) {
Dictionary* dictionary = Heap::non_monomorphic_cache();
int entry = dictionary->FindNumberEntry(flags);
@@ -933,7 +998,7 @@
int argc = arguments_.immediate();
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::CALL_IC,
type,
- NOT_IN_LOOP,
+ in_loop_,
argc);
return GetCodeWithFlags(flags, name);
}
diff --git a/src/stub-cache.h b/src/stub-cache.h
index b79841a..fff5613 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -78,6 +78,12 @@
static Object* ComputeLoadNormal(String* name, JSObject* receiver);
+ static Object* ComputeLoadGlobal(String* name,
+ GlobalObject* receiver,
+ JSGlobalPropertyCell* cell,
+ bool is_dont_delete);
+
+
// ---
static Object* ComputeKeyedLoadField(String* name,
@@ -112,6 +118,10 @@
int field_index,
Map* transition = NULL);
+ static Object* ComputeStoreGlobal(String* name,
+ GlobalObject* receiver,
+ JSGlobalPropertyCell* cell);
+
static Object* ComputeStoreCallback(String* name,
JSObject* receiver,
AccessorInfo* callback);
@@ -151,6 +161,13 @@
Object* object,
JSObject* holder);
+ static Object* ComputeCallGlobal(int argc,
+ InLoopFlag in_loop,
+ String* name,
+ GlobalObject* receiver,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function);
+
// ---
static Object* ComputeCallInitialize(int argc, InLoopFlag in_loop);
@@ -416,6 +433,11 @@
JSObject* holder,
String* name);
+ Object* CompileLoadGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* holder,
+ String* name,
+ bool is_dont_delete);
+
private:
Object* GetCode(PropertyType type, String* name);
};
@@ -457,6 +479,10 @@
AccessorInfo* callbacks,
String* name);
Object* CompileStoreInterceptor(JSObject* object, String* name);
+ Object* CompileStoreGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* holder,
+ String* name);
+
private:
Object* GetCode(PropertyType type, String* name);
@@ -477,24 +503,28 @@
class CallStubCompiler: public StubCompiler {
public:
- explicit CallStubCompiler(int argc) : arguments_(argc) { }
+ explicit CallStubCompiler(int argc, InLoopFlag in_loop)
+ : arguments_(argc), in_loop_(in_loop) { }
Object* CompileCallField(Object* object,
JSObject* holder,
int index,
- String* name,
- Code::Flags flags);
+ String* name);
Object* CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
- CheckType check,
- Code::Flags flags);
+ CheckType check);
Object* CompileCallInterceptor(Object* object,
JSObject* holder,
String* name);
+ Object* CompileCallGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name);
private:
const ParameterCount arguments_;
+ const InLoopFlag in_loop_;
const ParameterCount& arguments() { return arguments_; }
diff --git a/src/v8-counters.h b/src/v8-counters.h
index 06f116e..a62cd74 100644
--- a/src/v8-counters.h
+++ b/src/v8-counters.h
@@ -130,9 +130,15 @@
SC(keyed_load_inline_miss, V8.KeyedLoadInlineMiss) \
SC(named_load_inline, V8.NamedLoadInline) \
SC(named_load_inline_miss, V8.NamedLoadInlineMiss) \
+ SC(named_load_global_inline, V8.NamedLoadGlobalInline) \
+ SC(named_load_global_inline_miss, V8.NamedLoadGlobalInlineMiss) \
SC(keyed_store_field, V8.KeyedStoreField) \
SC(keyed_store_inline, V8.KeyedStoreInline) \
SC(keyed_store_inline_miss, V8.KeyedStoreInlineMiss) \
+ SC(named_store_global_inline, V8.NamedStoreGlobalInline) \
+ SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \
+ SC(call_global_inline, V8.CallGlobalInline) \
+ SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \
SC(for_in, V8.ForIn) \
SC(enum_cache_hits, V8.EnumCacheHits) \
SC(enum_cache_misses, V8.EnumCacheMisses) \
diff --git a/src/v8natives.js b/src/v8natives.js
index fe46351..841c920 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -154,7 +154,7 @@
// ECMA-262 - 15.1.1.3.
%SetProperty(global, "undefined", void 0, DONT_ENUM | DONT_DELETE);
-
+
// Setup non-enumerable function on the global object.
InstallFunctions(global, DONT_ENUM, $Array(
"isNaN", GlobalIsNaN,
@@ -174,7 +174,7 @@
%SetCode($Boolean, function(x) {
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
%_SetValueOf(this, ToBoolean(x));
} else {
return ToBoolean(x);
@@ -192,7 +192,7 @@
// ECMA-262 - 15.2.4.2
function ObjectToString() {
- var c = %ClassOf(this);
+ var c = %_ClassOf(this);
// Hide Arguments from the outside.
if (c === 'Arguments') c = 'Object';
return "[object " + c + "]";
@@ -273,7 +273,7 @@
%SetCode($Object, function(x) {
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
if (x == null) return this;
return ToObject(x);
} else {
@@ -311,7 +311,7 @@
function BooleanToString() {
// NOTE: Both Boolean objects and values can enter here as
// 'this'. This is not as dictated by ECMA-262.
- if (!IS_BOOLEAN(this) && !%HasBooleanClass(this))
+ if (!IS_BOOLEAN(this) && !IS_BOOLEAN_WRAPPER(this))
throw new $TypeError('Boolean.prototype.toString is not generic');
return ToString(%_ValueOf(this));
}
@@ -320,7 +320,7 @@
function BooleanValueOf() {
// NOTE: Both Boolean objects and values can enter here as
// 'this'. This is not as dictated by ECMA-262.
- if (!IS_BOOLEAN(this) && !%HasBooleanClass(this))
+ if (!IS_BOOLEAN(this) && !IS_BOOLEAN_WRAPPER(this))
throw new $TypeError('Boolean.prototype.valueOf is not generic');
return %_ValueOf(this);
}
@@ -350,7 +350,7 @@
// Set the Number function and constructor.
%SetCode($Number, function(x) {
var value = %_ArgumentsLength() == 0 ? 0 : ToNumber(x);
- if (%IsConstructCall()) {
+ if (%_IsConstructCall()) {
%_SetValueOf(this, value);
} else {
return value;
@@ -365,7 +365,7 @@
// 'this'. This is not as dictated by ECMA-262.
var number = this;
if (!IS_NUMBER(this)) {
- if (!%HasNumberClass(this))
+ if (!IS_NUMBER_WRAPPER(this))
throw new $TypeError('Number.prototype.toString is not generic');
// Get the value of this number in case it's an object.
number = %_ValueOf(this);
@@ -395,7 +395,7 @@
function NumberValueOf() {
// NOTE: Both Number objects and values can enter here as
// 'this'. This is not as dictated by ECMA-262.
- if (!IS_NUMBER(this) && !%HasNumberClass(this))
+ if (!IS_NUMBER(this) && !IS_NUMBER_WRAPPER(this))
throw new $TypeError('Number.prototype.valueOf is not generic');
return %_ValueOf(this);
}
@@ -502,10 +502,9 @@
$Function.prototype.constructor = $Function;
function FunctionSourceString(func) {
- // NOTE: Both Function objects and values can enter here as
- // 'func'. This is not as dictated by ECMA-262.
- if (!IS_FUNCTION(func) && !%HasFunctionClass(func))
+ if (!IS_FUNCTION(func)) {
throw new $TypeError('Function.prototype.toString is not generic');
+ }
var source = %FunctionGetSourceCode(func);
if (!IS_STRING(source)) {
@@ -570,4 +569,3 @@
}
SetupFunction();
-
diff --git a/src/version.cc b/src/version.cc
index 306249a..28baf3f 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1
#define MINOR_VERSION 2
-#define BUILD_NUMBER 10
+#define BUILD_NUMBER 11
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
diff --git a/src/virtual-frame.h b/src/virtual-frame.h
index 293f9e5..0bf0ca2 100644
--- a/src/virtual-frame.h
+++ b/src/virtual-frame.h
@@ -37,6 +37,8 @@
#include "x64/virtual-frame-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/virtual-frame-arm.h"
+#else
+#error Unsupported target architecture.
#endif
#endif // V8_VIRTUAL_FRAME_H_
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 7da6a8f..ced7577 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -750,6 +750,15 @@
}
+void Assembler::imul(Register src) {
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ emit_rex_64(src);
+ emit(0xF7);
+ emit_modrm(0x5, src);
+}
+
+
void Assembler::imul(Register dst, Register src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 0d98e5f..4e1eeff 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -605,12 +605,13 @@
// Divide rdx:rax by src. Quotient in rax, remainder in rdx.
void idiv(Register src);
- void imul(Register dst, Register src);
- void imul(Register dst, const Operand& src);
- // Performs the operation dst = src * imm.
- void imul(Register dst, Register src, Immediate imm);
+ // Signed multiply instructions.
+ void imul(Register src); // rdx:rax = rax * src.
+ void imul(Register dst, Register src); // dst = dst * src.
+ void imul(Register dst, const Operand& src); // dst = dst * src.
+ void imul(Register dst, Register src, Immediate imm); // dst = src * imm.
// Multiply 32 bit registers
- void imull(Register dst, Register src);
+ void imull(Register dst, Register src); // dst = dst * src.
void incq(Register dst);
void incq(const Operand& dst);
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index fc196ce..4df63da 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -25,9 +25,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// TODO(X64): Remove stdio.h when compiler test is removed.
-#include <stdio.h>
-
#include "v8.h"
#include "bootstrapper.h"
@@ -38,9 +35,6 @@
#include "register-allocator-inl.h"
#include "scopes.h"
-// TODO(X64): Remove compiler.h when compiler test is removed.
-#include "compiler.h"
-
namespace v8 {
namespace internal {
@@ -138,81 +132,6 @@
}
-void CodeGenerator::TestCodeGenerator() {
- // Compile a function from a string, and run it.
-
- // Set flags appropriately for this stage of implementation.
- // TODO(X64): Make ic work, and stop disabling them.
- // These settings stick - remove them when we don't want them anymore.
-#ifdef DEBUG
- FLAG_print_builtin_source = true;
- FLAG_print_builtin_ast = true;
-#endif
- FLAG_use_ic = false;
-
- // Read the file "test.js" from the current directory, compile, and run it.
- // If the file is not there, use a simple script embedded here instead.
- Handle<String> test_script;
- FILE* file = fopen("test.js", "rb");
- if (file == NULL) {
- test_script = Factory::NewStringFromAscii(CStrVector(
- "// Put all code in anonymous function to avoid global scope.\n"
- "(function(){"
- " var x = true ? 47 : 32;"
- " return x;"
- "})()"));
- } else {
- fseek(file, 0, SEEK_END);
- int size = ftell(file);
- rewind(file);
-
- char* chars = new char[size + 1];
- chars[size] = '\0';
- for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
- i += read;
- }
- fclose(file);
- test_script = Factory::NewStringFromAscii(CStrVector(chars));
- delete[] chars;
- }
-
- Handle<JSFunction> test_function = Compiler::Compile(
- test_script,
- Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")),
- 0,
- 0,
- NULL,
- NULL);
-
- Code* code_object = test_function->code(); // Local for debugging ease.
- USE(code_object);
-
- // Create a dummy function and context.
- Handle<JSFunction> bridge =
- Factory::NewFunction(Factory::empty_symbol(), Factory::undefined_value());
- Handle<Context> context =
- Factory::NewFunctionContext(Context::MIN_CONTEXT_SLOTS, bridge);
-
- test_function = Factory::NewFunctionFromBoilerplate(
- test_function,
- context);
-
- bool pending_exceptions;
- Handle<Object> result =
- Execution::Call(test_function,
- Handle<Object>::cast(test_function),
- 0,
- NULL,
- &pending_exceptions);
- // Function compiles and runs, but returns a JSFunction object.
-#ifdef DEBUG
- PrintF("Result of test function: ");
- result->Print();
-#endif
-}
-
-
void CodeGenerator::GenCode(FunctionLiteral* function) {
// Record the position for debugging purposes.
CodeForFunctionPosition(function);
@@ -1895,7 +1814,7 @@
void CodeGenerator::VisitSlot(Slot* node) {
Comment cmnt(masm_, "[ Slot");
- LoadFromSlot(node, typeof_state());
+ LoadFromSlotCheckForArguments(node, typeof_state());
}
@@ -2227,12 +2146,12 @@
Result elements = frame_->Pop();
elements.ToRegister();
frame_->Spill(elements.reg());
- // Get the elements array.
+ // Get the elements FixedArray.
__ movq(elements.reg(),
FieldOperand(elements.reg(), JSObject::kElementsOffset));
// Write to the indexed properties array.
- int offset = i * kPointerSize + Array::kHeaderSize;
+ int offset = i * kPointerSize + FixedArray::kHeaderSize;
__ movq(FieldOperand(elements.reg(), offset), prop_value.reg());
// Update the write barrier for the array address.
@@ -2300,7 +2219,7 @@
// or the right hand side is a different variable. TakeValue invalidates
// the target, with an implicit promise that it will be written to again
// before it is read.
- // TODO(X64): Implement TakeValue optimization.
+ // TODO(X64): Implement TakeValue optimization. Check issue 150016.
if (false) {
// if (literal != NULL || (right_var != NULL && right_var != var)) {
// target.TakeValue(NOT_INSIDE_TYPEOF);
@@ -2410,9 +2329,6 @@
frame_->SetElementAt(0, &result);
} else if (var != NULL && var->slot() != NULL &&
var->slot()->type() == Slot::LOOKUP) {
- // TODO(X64): Enable calls of non-global functions.
- UNIMPLEMENTED();
- /*
// ----------------------------------
// JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj
// ----------------------------------
@@ -2420,8 +2336,8 @@
// Load the function from the context. Sync the frame so we can
// push the arguments directly into place.
frame_->SyncRange(0, frame_->element_count() - 1);
- frame_->EmitPush(esi);
- frame_->EmitPush(Immediate(var->name()));
+ frame_->EmitPush(rsi);
+ frame_->EmitPush(var->name());
frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
// The runtime call returns a pair of values in rax and rdx. The
// looked-up function is in rax and the receiver is in rdx. These
@@ -2437,7 +2353,6 @@
// Call the function.
CallWithArguments(args, node->position());
- */
} else if (property != NULL) {
// Check if the key is a literal string.
Literal* literal = property->key()->AsLiteral();
@@ -3227,10 +3142,11 @@
// It can be an undetectable object.
__ movq(kScratchRegister,
FieldOperand(answer.reg(), HeapObject::kMapOffset));
- __ movb(kScratchRegister,
- FieldOperand(kScratchRegister, Map::kBitFieldOffset));
- __ testb(kScratchRegister, Immediate(1 << Map::kIsUndetectable));
+ __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
destination()->false_target()->Branch(not_zero);
+ __ movb(kScratchRegister,
+ FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
__ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
destination()->false_target()->Branch(below);
__ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
@@ -3330,6 +3246,14 @@
}
+void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
+ // TODO(X64): Optimize this like it's done on IA-32.
+ ASSERT(args->length() == 0);
+ Result answer = frame_->CallRuntime(Runtime::kIsConstructCall, 0);
+ frame_->Push(&answer);
+}
+
+
void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
// ArgumentsAccessStub takes the parameter count as an input argument
@@ -3412,16 +3336,45 @@
}
+void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) {
+ ASSERT(args->length() == 0);
+ frame_->SpillAll();
-void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* a) {
- UNIMPLEMENTED();
+ // Make sure the frame is aligned like the OS expects.
+ static const int kFrameAlignment = OS::ActivationFrameAlignment();
+ if (kFrameAlignment > 0) {
+ ASSERT(IsPowerOf2(kFrameAlignment));
+ __ movq(rbx, rsp); // Save in AMD-64 abi callee-saved register.
+ __ and_(rsp, Immediate(-kFrameAlignment));
+ }
+
+ // Call V8::RandomPositiveSmi().
+ __ Call(FUNCTION_ADDR(V8::RandomPositiveSmi), RelocInfo::RUNTIME_ENTRY);
+
+ // Restore stack pointer from the callee-saved register rbx.
+ if (kFrameAlignment > 0) {
+ __ movq(rsp, rbx);
+ }
+
+ Result result = allocator_->Allocate(rax);
+ frame_->Push(&result);
}
+
void CodeGenerator::GenerateFastMathOp(MathOp op, ZoneList<Expression*>* args) {
UNIMPLEMENTED();
}
+void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
+ // TODO(X64): Optimize this like it's done on IA-32.
+ ASSERT(args->length() == 1);
+ Load(args->at(0)); // Load the object.
+ Result result = frame_->CallRuntime(Runtime::kClassOf, 1);
+ frame_->Push(&result);
+}
+
+
void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
JumpTarget leave;
@@ -3906,6 +3859,44 @@
}
+void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
+ TypeofState state) {
+ LoadFromSlot(slot, state);
+
+ // Bail out quickly if we're not using lazy arguments allocation.
+ if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
+
+ // ... or if the slot isn't a non-parameter arguments slot.
+ if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
+
+ // Pop the loaded value from the stack.
+ Result value = frame_->Pop();
+
+ // If the loaded value is a constant, we know if the arguments
+ // object has been lazily loaded yet.
+ if (value.is_constant()) {
+ if (value.handle()->IsTheHole()) {
+ Result arguments = StoreArgumentsObject(false);
+ frame_->Push(&arguments);
+ } else {
+ frame_->Push(&value);
+ }
+ return;
+ }
+
+ // The loaded value is in a register. If it is the sentinel that
+ // indicates that we haven't loaded the arguments object yet, we
+ // need to do it now.
+ JumpTarget exit;
+ __ Cmp(value.reg(), Factory::the_hole_value());
+ frame_->Push(&value);
+ exit.Branch(not_equal);
+ Result arguments = StoreArgumentsObject(false);
+ frame_->SetElementAt(0, &arguments);
+ exit.Bind();
+}
+
+
void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
// TODO(X64): Enable more types of slot.
@@ -4009,8 +4000,72 @@
Slot* slot,
TypeofState typeof_state,
JumpTarget* slow) {
- UNIMPLEMENTED();
- return Result(rax);
+ // Check that no extension objects have been created by calls to
+ // eval from the current scope to the global scope.
+ Register context = rsi;
+ Result tmp = allocator_->Allocate();
+ ASSERT(tmp.is_valid()); // All non-reserved registers were available.
+
+ Scope* s = scope();
+ while (s != NULL) {
+ if (s->num_heap_slots() > 0) {
+ if (s->calls_eval()) {
+ // Check that extension is NULL.
+ __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
+ Immediate(0));
+ slow->Branch(not_equal, not_taken);
+ }
+ // Load next context in chain.
+ __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
+ __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
+ context = tmp.reg();
+ }
+ // If no outer scope calls eval, we do not need to check more
+ // context extensions. If we have reached an eval scope, we check
+ // all extensions from this point.
+ if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+ s = s->outer_scope();
+ }
+
+ if (s->is_eval_scope()) {
+ // Loop up the context chain. There is no frame effect so it is
+ // safe to use raw labels here.
+ Label next, fast;
+ if (!context.is(tmp.reg())) {
+ __ movq(tmp.reg(), context);
+ }
+ // Load map for comparison into register, outside loop.
+ __ Move(kScratchRegister, Factory::global_context_map());
+ __ bind(&next);
+ // Terminate at global context.
+ __ cmpq(kScratchRegister, FieldOperand(tmp.reg(), HeapObject::kMapOffset));
+ __ j(equal, &fast);
+ // Check that extension is NULL.
+ __ cmpq(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
+ slow->Branch(not_equal);
+ // Load next context in chain.
+ __ movq(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
+ __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
+ __ jmp(&next);
+ __ bind(&fast);
+ }
+ tmp.Unuse();
+
+ // All extension objects were empty and it is safe to use a global
+ // load IC call.
+ LoadGlobal();
+ frame_->Push(slot->var()->name());
+ RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+ ? RelocInfo::CODE_TARGET
+ : RelocInfo::CODE_TARGET_CONTEXT;
+ Result answer = frame_->CallLoadIC(mode);
+ // A test rax instruction following the call signals that the inobject
+ // property case was inlined. Ensure that there is not a test rax
+ // instruction here.
+ __ nop();
+ // Discard the global object. The result is in answer.
+ frame_->Drop();
+ return answer;
}
@@ -4120,39 +4175,6 @@
}
-class CompareStub: public CodeStub {
- public:
- CompareStub(Condition cc, bool strict) : cc_(cc), strict_(strict) { }
-
- void Generate(MacroAssembler* masm);
-
- private:
- Condition cc_;
- bool strict_;
-
- Major MajorKey() { return Compare; }
-
- int MinorKey() {
- // Encode the three parameters in a unique 16 bit value.
- ASSERT(static_cast<int>(cc_) < (1 << 15));
- return (static_cast<int>(cc_) << 1) | (strict_ ? 1 : 0);
- }
-
- // Branch to the label if the given object isn't a symbol.
- void BranchIfNonSymbol(MacroAssembler* masm,
- Label* label,
- Register object);
-
-#ifdef DEBUG
- void Print() {
- PrintF("CompareStub (cc %d), (strict %s)\n",
- static_cast<int>(cc_),
- strict_ ? "true" : "false");
- }
-#endif
-};
-
-
void CodeGenerator::Comparison(Condition cc,
bool strict,
ControlDestination* dest) {
@@ -5148,7 +5170,7 @@
Comment cmnt(masm, "[ Load from Slot");
Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
ASSERT(slot != NULL);
- cgen_->LoadFromSlot(slot, typeof_state);
+ cgen_->LoadFromSlotCheckForArguments(slot, typeof_state);
break;
}
@@ -5481,12 +5503,32 @@
}
-
-
// End of CodeGenerator implementation.
void UnarySubStub::Generate(MacroAssembler* masm) {
- UNIMPLEMENTED();
+ Label slow;
+ Label done;
+
+ // Check whether the value is a smi.
+ __ testl(rax, Immediate(kSmiTagMask));
+ // TODO(X64): Add inline code that handles floats, as on ia32 platform.
+ __ j(not_zero, &slow);
+ // Enter runtime system if the value of the smi is zero
+ // to make sure that we switch between 0 and -0.
+ // Also enter it if the value of the smi is Smi::kMinValue
+ __ testl(rax, Immediate(0x7FFFFFFE));
+ __ j(zero, &slow);
+ __ neg(rax);
+ __ jmp(&done);
+ // Enter runtime system.
+ __ bind(&slow);
+ __ pop(rcx); // pop return address
+ __ push(rax);
+ __ push(rcx); // push return address
+ __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
+
+ __ bind(&done);
+ __ StubReturn(1);
}
@@ -5647,8 +5689,8 @@
// Fast negative check for symbol-to-symbol equality.
__ bind(&check_for_symbols);
if (cc_ == equal) {
- BranchIfNonSymbol(masm, &call_builtin, rax);
- BranchIfNonSymbol(masm, &call_builtin, rdx);
+ BranchIfNonSymbol(masm, &call_builtin, rax, kScratchRegister);
+ BranchIfNonSymbol(masm, &call_builtin, rdx, kScratchRegister);
// We've already checked for object identity, so if both operands
// are symbols they aren't equal. Register rax already holds a
@@ -5691,14 +5733,15 @@
void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
Label* label,
- Register object) {
+ Register object,
+ Register scratch) {
__ testl(object, Immediate(kSmiTagMask));
__ j(zero, label);
- __ movq(kScratchRegister, FieldOperand(object, HeapObject::kMapOffset));
- __ movzxbq(kScratchRegister,
- FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
- __ and_(kScratchRegister, Immediate(kIsSymbolMask | kIsNotStringMask));
- __ cmpb(kScratchRegister, Immediate(kSymbolTag | kStringTag));
+ __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
+ __ movzxbq(scratch,
+ FieldOperand(scratch, Map::kInstanceTypeOffset));
+ __ and_(scratch, Immediate(kIsSymbolMask | kIsNotStringMask));
+ __ cmpb(scratch, Immediate(kSymbolTag | kStringTag));
__ j(not_equal, label);
}
@@ -6752,8 +6795,6 @@
// If all else fails, use the runtime system to get the correct
// result.
__ bind(&call_runtime);
- // Disable builtin-calls until JS builtins can compile and run.
- __ Abort("Disabled until builtins compile and run.");
switch (op_) {
case Token::ADD:
__ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
@@ -6794,6 +6835,13 @@
}
+int CompareStub::MinorKey() {
+ // Encode the two parameters in a unique 16 bit value.
+ ASSERT(static_cast<unsigned>(cc_) < (1 << 15));
+ return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0);
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/x64/codegen-x64.h b/src/x64/codegen-x64.h
index af82de8..0e8505a 100644
--- a/src/x64/codegen-x64.h
+++ b/src/x64/codegen-x64.h
@@ -294,15 +294,6 @@
Handle<Script> script,
bool is_eval);
- // During implementation of CodeGenerator, this call creates a
- // CodeGenerator instance, and calls GenCode on it with a null
- // function literal. CodeGenerator will then construct and return
- // a simple dummy function. Call this during bootstrapping before
- // trying to compile any real functions, to get CodeGenerator up
- // and running.
- // TODO(X64): Remove once we can get through the bootstrapping process.
- static void TestCodeGenerator();
-
#ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type);
#endif
@@ -432,6 +423,7 @@
// Read a value from a slot and leave it on top of the expression stack.
void LoadFromSlot(Slot* slot, TypeofState typeof_state);
+ void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);
Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
TypeofState typeof_state,
JumpTarget* slow);
@@ -522,11 +514,15 @@
void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
void GenerateIsArray(ZoneList<Expression*>* args);
+ // Support for construct call checks.
+ void GenerateIsConstructCall(ZoneList<Expression*>* args);
+
// Support for arguments.length and arguments[?].
void GenerateArgumentsLength(ZoneList<Expression*>* args);
void GenerateArgumentsAccess(ZoneList<Expression*>* args);
- // Support for accessing the value field of an object (used by Date).
+ // Support for accessing the class and value fields of an object.
+ void GenerateClassOf(ZoneList<Expression*>* args);
void GenerateValueOf(ZoneList<Expression*>* args);
void GenerateSetValueOf(ZoneList<Expression*>* args);
diff --git a/src/x64/frames-x64.h b/src/x64/frames-x64.h
index d4ab2c6..24c78da 100644
--- a/src/x64/frames-x64.h
+++ b/src/x64/frames-x64.h
@@ -59,12 +59,7 @@
class EntryFrameConstants : public AllStatic {
public:
- static const int kCallerFPOffset = -6 * kPointerSize;
-
- static const int kFunctionArgOffset = +3 * kPointerSize;
- static const int kReceiverArgOffset = +4 * kPointerSize;
- static const int kArgcOffset = +5 * kPointerSize;
- static const int kArgvOffset = +6 * kPointerSize;
+ static const int kCallerFPOffset = -10 * kPointerSize;
};
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index f6a5ffe..780fcdb 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -79,51 +79,6 @@
}
-void MacroAssembler::ConstructAndTestJSFunction() {
- const int initial_buffer_size = 4 * KB;
- char* buffer = new char[initial_buffer_size];
- MacroAssembler masm(buffer, initial_buffer_size);
-
- const uint64_t secret = V8_INT64_C(0xdeadbeefcafebabe);
- Handle<String> constant =
- Factory::NewStringFromAscii(Vector<const char>("451", 3), TENURED);
-#define __ ACCESS_MASM((&masm))
- // Construct a simple JSfunction here, using Assembler and MacroAssembler
- // commands.
- __ movq(rax, constant, RelocInfo::EMBEDDED_OBJECT);
- __ push(rax);
- __ CallRuntime(Runtime::kStringParseFloat, 1);
- __ movq(kScratchRegister, secret, RelocInfo::NONE);
- __ addq(rax, kScratchRegister);
- __ ret(0);
-#undef __
- CodeDesc desc;
- masm.GetCode(&desc);
- Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
- Object* code = Heap::CreateCode(desc, NULL, flags, Handle<Object>::null());
- if (!code->IsFailure()) {
- Handle<Code> code_handle(Code::cast(code));
- Handle<String> name =
- Factory::NewStringFromAscii(Vector<const char>("foo", 3), NOT_TENURED);
- Handle<JSFunction> function =
- Factory::NewFunction(name,
- JS_FUNCTION_TYPE,
- JSObject::kHeaderSize,
- code_handle,
- true);
- bool pending_exceptions;
- Handle<Object> result =
- Execution::Call(function,
- Handle<Object>::cast(function),
- 0,
- NULL,
- &pending_exceptions);
- CHECK(result->IsSmi());
- CHECK(secret + (451 << kSmiTagSize) == reinterpret_cast<uint64_t>(*result));
- }
-}
-
-
void MacroAssembler::Abort(const char* msg) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 3ae78ba..c298a25 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -67,16 +67,6 @@
MacroAssembler(void* buffer, int size);
// ---------------------------------------------------------------------------
- // x64 Implementation Support
-
- // Test the MacroAssembler by constructing and calling a simple JSFunction.
- // Cannot be done using API because this must be done in the middle of the
- // bootstrapping process.
- // TODO(X64): Remove once we can get through the bootstrapping process.
-
- static void ConstructAndTestJSFunction();
-
- // ---------------------------------------------------------------------------
// GC Support
// Set the remembered set bit for [object+offset].
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index fdfa67f..1a24694 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -42,8 +42,7 @@
Object* CallStubCompiler::CompileCallConstant(Object* a,
JSObject* b,
JSFunction* c,
- StubCompiler::CheckType d,
- Code::Flags flags) {
+ StubCompiler::CheckType d) {
UNIMPLEMENTED();
return NULL;
}
@@ -51,8 +50,7 @@
Object* CallStubCompiler::CompileCallField(Object* a,
JSObject* b,
int c,
- String* d,
- Code::Flags flags) {
+ String* d) {
UNIMPLEMENTED();
return NULL;
}
@@ -67,6 +65,15 @@
+Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ JSFunction* function,
+ String* name) {
+ UNIMPLEMENTED();
+ return NULL;
+}
+
+
Object* LoadStubCompiler::CompileLoadCallback(JSObject* a,
JSObject* b,
AccessorInfo* c,
@@ -102,6 +109,15 @@
}
+Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name,
+ bool is_dont_delete) {
+ UNIMPLEMENTED();
+ return NULL;
+}
+
+
Object* StoreStubCompiler::CompileStoreCallback(JSObject* a,
AccessorInfo* b,
String* c) {
@@ -125,6 +141,14 @@
}
+Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
+ JSGlobalPropertyCell* cell,
+ String* name) {
+ UNIMPLEMENTED();
+ return NULL;
+}
+
+
// TODO(1241006): Avoid having lazy compile stubs specialized by the
// number of arguments. It is not needed anymore.
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {