Update V8 to r6190 as required by WebKit r75315
Change-Id: I0b2f598e4d8748df417ad350fc47a1c465ad1fef
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 36f7507..cd7f07f 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -66,13 +66,14 @@
// such that we use an enum in optimized mode, and the struct in debug
// mode. This way we get the compile-time error checking in debug mode
// and best performance in optimized code.
-//
+
// Core register
struct Register {
static const int kNumRegisters = 16;
static const int kNumAllocatableRegisters = 8;
static int ToAllocationIndex(Register reg) {
+ ASSERT(reg.code() < kNumAllocatableRegisters);
return reg.code();
}
@@ -132,7 +133,7 @@
const Register r6 = { 6 };
const Register r7 = { 7 };
const Register r8 = { 8 }; // Used as context register.
-const Register r9 = { 9 };
+const Register r9 = { 9 }; // Used as lithium codegen scratch register.
const Register r10 = { 10 }; // Used as roots register.
const Register fp = { 11 };
const Register ip = { 12 };
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 5ec8584..577ac63 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -917,13 +917,6 @@
}
-void RecordWriteStub::Generate(MacroAssembler* masm) {
- __ add(offset_, object_, Operand(offset_));
- __ RecordWriteHelper(object_, offset_, scratch_);
- __ Ret();
-}
-
-
// On entry lhs_ and rhs_ are the values to be compared.
// On exit r0 is 0, positive or negative to indicate the result of
// the comparison.
@@ -1229,16 +1222,22 @@
bool generate_code_to_calculate_answer = true;
if (ShouldGenerateFPCode()) {
+ // DIV has neither SmiSmi fast code nor specialized slow code.
+ // So don't try to patch a DIV Stub.
if (runtime_operands_type_ == BinaryOpIC::DEFAULT) {
switch (op_) {
case Token::ADD:
case Token::SUB:
case Token::MUL:
- case Token::DIV:
GenerateTypeTransition(masm); // Tail call.
generate_code_to_calculate_answer = false;
break;
+ case Token::DIV:
+ // DIV has neither SmiSmi fast code nor specialized slow code.
+ // So don't try to patch a DIV Stub.
+ break;
+
default:
break;
}
@@ -1299,7 +1298,8 @@
// HEAP_NUMBERS stub is slower than GENERIC on a pair of smis.
// r0 is known to be a smi. If r1 is also a smi then switch to GENERIC.
Label r1_is_not_smi;
- if (runtime_operands_type_ == BinaryOpIC::HEAP_NUMBERS) {
+ if ((runtime_operands_type_ == BinaryOpIC::HEAP_NUMBERS) &&
+ HasSmiSmiFastPath()) {
__ tst(r1, Operand(kSmiTagMask));
__ b(ne, &r1_is_not_smi);
GenerateTypeTransition(masm); // Tail call.
@@ -2894,45 +2894,45 @@
// Uses registers r0 to r4. Expected input is
-// function in r0 (or at sp+1*ptrsz) and object in
+// object in r0 (or at sp+1*kPointerSize) and function in
// r1 (or at sp), depending on whether or not
// args_in_registers() is true.
void InstanceofStub::Generate(MacroAssembler* masm) {
// Fixed register usage throughout the stub:
- const Register object = r1; // Object (lhs).
+ const Register object = r0; // Object (lhs).
const Register map = r3; // Map of the object.
- const Register function = r0; // Function (rhs).
+ const Register function = r1; // Function (rhs).
const Register prototype = r4; // Prototype of the function.
const Register scratch = r2;
Label slow, loop, is_instance, is_not_instance, not_js_object;
if (!args_in_registers()) {
- __ ldr(function, MemOperand(sp, 1 * kPointerSize));
- __ ldr(object, MemOperand(sp, 0));
+ __ ldr(object, MemOperand(sp, 1 * kPointerSize));
+ __ ldr(function, MemOperand(sp, 0));
}
// Check that the left hand is a JS object and load map.
- __ BranchOnSmi(object, &slow);
- __ IsObjectJSObjectType(object, map, scratch, &slow);
+ __ BranchOnSmi(object, &not_js_object);
+ __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// Look up the function and the map in the instanceof cache.
Label miss;
__ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
- __ cmp(object, ip);
+ __ cmp(function, ip);
__ b(ne, &miss);
__ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
__ cmp(map, ip);
__ b(ne, &miss);
- __ LoadRoot(function, Heap::kInstanceofCacheAnswerRootIndex);
+ __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret(args_in_registers() ? 0 : 2);
__ bind(&miss);
- __ TryGetFunctionPrototype(object, prototype, scratch, &slow);
+ __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
// Check that the function prototype is a JS object.
__ BranchOnSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
- __ StoreRoot(object, Heap::kInstanceofCacheFunctionRootIndex);
+ __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
// Register mapping: r3 is object map and r4 is function prototype.
@@ -2957,6 +2957,7 @@
__ bind(&is_not_instance);
__ mov(r0, Operand(Smi::FromInt(1)));
+ __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret(args_in_registers() ? 0 : 2);
Label object_not_null, object_not_null_or_smi;
@@ -2986,6 +2987,9 @@
__ Ret(args_in_registers() ? 0 : 2);
// Slow-case. Tail call builtin.
+ if (args_in_registers()) {
+ __ Push(r0, r1);
+ }
__ bind(&slow);
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
}
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index 8ffca77..9fa8687 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -77,7 +77,7 @@
rhs_(rhs),
constant_rhs_(constant_rhs),
specialized_on_rhs_(RhsIsOneWeWantToOptimizeFor(op, constant_rhs)),
- runtime_operands_type_(BinaryOpIC::DEFAULT),
+ runtime_operands_type_(BinaryOpIC::UNINIT_OR_SMI),
name_(NULL) { }
GenericBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info)
@@ -178,6 +178,10 @@
return lhs_is_r0 ? r1 : r0;
}
+ bool HasSmiSmiFastPath() {
+ return op_ != Token::DIV;
+ }
+
bool ShouldGenerateSmiCode() {
return ((op_ != Token::DIV && op_ != Token::MOD) || specialized_on_rhs_) &&
runtime_operands_type_ != BinaryOpIC::HEAP_NUMBERS &&
@@ -437,43 +441,6 @@
};
-class RecordWriteStub : public CodeStub {
- public:
- RecordWriteStub(Register object, Register offset, Register scratch)
- : object_(object), offset_(offset), scratch_(scratch) { }
-
- void Generate(MacroAssembler* masm);
-
- private:
- Register object_;
- Register offset_;
- Register scratch_;
-
- // Minor key encoding in 12 bits. 4 bits for each of the three
- // registers (object, offset and scratch) OOOOAAAASSSS.
- class ScratchBits: public BitField<uint32_t, 0, 4> {};
- class OffsetBits: public BitField<uint32_t, 4, 4> {};
- class ObjectBits: public BitField<uint32_t, 8, 4> {};
-
- Major MajorKey() { return RecordWrite; }
-
- int MinorKey() {
- // Encode the registers.
- return ObjectBits::encode(object_.code()) |
- OffsetBits::encode(offset_.code()) |
- ScratchBits::encode(scratch_.code());
- }
-
-#ifdef DEBUG
- void Print() {
- PrintF("RecordWriteStub (object reg %d), (offset reg %d),"
- " (scratch reg %d)\n",
- object_.code(), offset_.code(), scratch_.code());
- }
-#endif
-};
-
-
// Enter C code from generated RegExp code in a way that allows
// the C code to fix the return address in case of a GC.
// Currently only needed on ARM.
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 59bc14e..4d061d2 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -5618,12 +5618,10 @@
// (or them and test against Smi mask.)
__ mov(tmp2, tmp1);
- RecordWriteStub recordWrite1(tmp1, index1, tmp3);
- __ CallStub(&recordWrite1);
-
- RecordWriteStub recordWrite2(tmp2, index2, tmp3);
- __ CallStub(&recordWrite2);
-
+ __ add(index1, index1, tmp1);
+ __ add(index2, index2, tmp1);
+ __ RecordWriteHelper(tmp1, index1, tmp3);
+ __ RecordWriteHelper(tmp2, index2, tmp3);
__ bind(&done);
deferred->BindExit();
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index d254918..0275730 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -38,6 +38,8 @@
#include "scopes.h"
#include "stub-cache.h"
+#include "arm/code-stubs-arm.h"
+
namespace v8 {
namespace internal {
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index e31d2e1..87efc92 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -767,11 +767,6 @@
}
-LInstruction* LChunkBuilder::DefineSameAsAny(LInstruction* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_ANY_INPUT));
-}
-
-
LInstruction* LChunkBuilder::DefineSameAsFirst(LInstruction* instr) {
return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -1016,9 +1011,6 @@
HInstruction* current = block->first();
int start = chunk_->instructions()->length();
while (current != NULL && !is_aborted()) {
- if (FLAG_trace_environment) {
- PrintF("Process instruction %d\n", current->id());
- }
// Code for constants in registers is generated lazily.
if (!current->EmitAtUses()) {
VisitInstruction(current);
@@ -1125,7 +1117,7 @@
LEnvironment* outer = CreateEnvironment(hydrogen_env->outer());
int ast_id = hydrogen_env->ast_id();
ASSERT(ast_id != AstNode::kNoNumber);
- int value_count = hydrogen_env->values()->length();
+ int value_count = hydrogen_env->length();
LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
ast_id,
hydrogen_env->parameter_count(),
@@ -1225,7 +1217,6 @@
ASSERT(compare->value()->representation().IsTagged());
return new LHasInstanceTypeAndBranch(UseRegisterAtStart(compare->value()),
- TempRegister(),
first_id,
second_id);
} else if (v->IsHasCachedArrayIndex()) {
@@ -1238,11 +1229,8 @@
HIsNull* compare = HIsNull::cast(v);
ASSERT(compare->value()->representation().IsTagged());
- // We only need a temp register for non-strict compare.
- LOperand* temp = compare->is_strict() ? NULL : TempRegister();
return new LIsNullAndBranch(UseRegisterAtStart(compare->value()),
compare->is_strict(),
- temp,
first_id,
second_id);
} else if (v->IsIsObject()) {
@@ -1295,12 +1283,8 @@
HCompareMapAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- HBasicBlock* first = instr->FirstSuccessor();
- HBasicBlock* second = instr->SecondSuccessor();
- return new LCmpMapAndBranch(value,
- instr->map(),
- first->block_id(),
- second->block_id());
+ LOperand* temp = TempRegister();
+ return new LCmpMapAndBranch(value, temp);
}
@@ -1316,8 +1300,8 @@
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* result =
- new LInstanceOf(UseFixed(instr->left(), r1),
- UseFixed(instr->right(), r0));
+ new LInstanceOf(UseFixed(instr->left(), r0),
+ UseFixed(instr->right(), r1));
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1370,6 +1354,9 @@
return AssignEnvironment(DefineAsRegister(result));
case kMathSqrt:
return DefineSameAsFirst(result);
+ case kMathRound:
+ Abort("MathRound LUnaryMathOperation not implemented");
+ return NULL;
case kMathPowHalf:
Abort("MathPowHalf LUnaryMathOperation not implemented");
return NULL;
@@ -1666,19 +1653,15 @@
}
-LInstruction* LChunkBuilder::DoArrayLength(HArrayLength* instr) {
- LOperand* array = NULL;
- LOperand* temporary = NULL;
+LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
+ LOperand* array = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LJSArrayLength(array));
+}
- if (instr->value()->IsLoadElements()) {
- array = UseRegisterAtStart(instr->value());
- } else {
- array = UseRegister(instr->value());
- temporary = TempRegister();
- }
- LInstruction* result = new LArrayLength(array, temporary);
- return AssignEnvironment(DefineAsRegister(result));
+LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
+ LOperand* array = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new LFixedArrayLength(array));
}
@@ -1778,9 +1761,11 @@
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LOperand* temp = TempRegister();
+ LOperand* temp1 = TempRegister();
+ LOperand* temp2 = TempRegister();
LInstruction* result =
- new LCheckPrototypeMaps(temp,
+ new LCheckPrototypeMaps(temp1,
+ temp2,
instr->holder(),
instr->receiver_map());
return AssignEnvironment(result);
@@ -1854,6 +1839,13 @@
}
+LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
+ HLoadFunctionPrototype* instr) {
+ return AssignEnvironment(DefineAsRegister(
+ new LLoadFunctionPrototype(UseRegister(instr->function()))));
+}
+
+
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
return DefineSameAsFirst(new LLoadElements(input));
@@ -2054,13 +2046,7 @@
}
}
- if (FLAG_trace_environment) {
- PrintF("Reconstructed environment ast_id=%d, instr_id=%d\n",
- instr->ast_id(),
- instr->id());
- env->PrintToStd();
- }
- ASSERT(env->values()->length() == instr->environment_height());
+ ASSERT(env->length() == instr->environment_length());
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index 41209c6..2f8cc1c 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -101,7 +101,8 @@
// LStoreNamedField
// LStoreNamedGeneric
// LUnaryOperation
-// LArrayLength
+// LJSArrayLength
+// LFixedArrayLength
// LBitNotI
// LBranch
// LCallNew
@@ -127,6 +128,7 @@
// LIsSmiAndBranch
// LLoadNamedField
// LLoadNamedGeneric
+// LLoadFunctionPrototype
// LNumberTagD
// LNumberTagI
// LPushArgument
@@ -161,7 +163,6 @@
V(ArgumentsLength) \
V(ArithmeticD) \
V(ArithmeticT) \
- V(ArrayLength) \
V(ArrayLiteral) \
V(BitI) \
V(BitNotI) \
@@ -195,6 +196,7 @@
V(Deoptimize) \
V(DivI) \
V(DoubleToI) \
+ V(FixedArrayLength) \
V(FunctionLiteral) \
V(Gap) \
V(GlobalObject) \
@@ -209,6 +211,7 @@
V(IsObjectAndBranch) \
V(IsSmi) \
V(IsSmiAndBranch) \
+ V(JSArrayLength) \
V(HasInstanceType) \
V(HasInstanceTypeAndBranch) \
V(HasCachedArrayIndex) \
@@ -223,6 +226,7 @@
V(LoadKeyedGeneric) \
V(LoadNamedField) \
V(LoadNamedGeneric) \
+ V(LoadFunctionPrototype) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
@@ -722,11 +726,9 @@
public:
LIsNullAndBranch(LOperand* value,
bool is_strict,
- LOperand* temp,
int true_block_id,
int false_block_id)
: LIsNull(value, is_strict),
- temp_(temp),
true_block_id_(true_block_id),
false_block_id_(false_block_id) { }
@@ -737,10 +739,7 @@
int true_block_id() const { return true_block_id_; }
int false_block_id() const { return false_block_id_; }
- LOperand* temp() const { return temp_; }
-
private:
- LOperand* temp_;
int true_block_id_;
int false_block_id_;
};
@@ -835,11 +834,9 @@
class LHasInstanceTypeAndBranch: public LHasInstanceType {
public:
LHasInstanceTypeAndBranch(LOperand* value,
- LOperand* temporary,
int true_block_id,
int false_block_id)
: LHasInstanceType(value),
- temp_(temporary),
true_block_id_(true_block_id),
false_block_id_(false_block_id) { }
@@ -851,10 +848,7 @@
int true_block_id() const { return true_block_id_; }
int false_block_id() const { return false_block_id_; }
- LOperand* temp() { return temp_; }
-
private:
- LOperand* temp_;
int true_block_id_;
int false_block_id_;
};
@@ -1117,42 +1111,43 @@
class LCmpMapAndBranch: public LUnaryOperation {
public:
- LCmpMapAndBranch(LOperand* value,
- Handle<Map> map,
- int true_block_id,
- int false_block_id)
- : LUnaryOperation(value),
- map_(map),
- true_block_id_(true_block_id),
- false_block_id_(false_block_id) { }
+ LCmpMapAndBranch(LOperand* value, LOperand* temp)
+ : LUnaryOperation(value), temp_(temp) { }
DECLARE_CONCRETE_INSTRUCTION(CmpMapAndBranch, "cmp-map-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(CompareMapAndBranch)
virtual bool IsControl() const { return true; }
- Handle<Map> map() const { return map_; }
- int true_block_id() const { return true_block_id_; }
- int false_block_id() const { return false_block_id_; }
+ LOperand* temp() const { return temp_; }
+ Handle<Map> map() const { return hydrogen()->map(); }
+ int true_block_id() const {
+ return hydrogen()->true_destination()->block_id();
+ }
+ int false_block_id() const {
+ return hydrogen()->false_destination()->block_id();
+ }
private:
- Handle<Map> map_;
- int true_block_id_;
- int false_block_id_;
+ LOperand* temp_;
};
-class LArrayLength: public LUnaryOperation {
+class LJSArrayLength: public LUnaryOperation {
public:
- LArrayLength(LOperand* input, LOperand* temporary)
- : LUnaryOperation(input), temporary_(temporary) { }
+ explicit LJSArrayLength(LOperand* input) : LUnaryOperation(input) { }
- LOperand* temporary() const { return temporary_; }
+ DECLARE_CONCRETE_INSTRUCTION(JSArrayLength, "js-array-length")
+ DECLARE_HYDROGEN_ACCESSOR(JSArrayLength)
+};
- DECLARE_CONCRETE_INSTRUCTION(ArrayLength, "array-length")
- DECLARE_HYDROGEN_ACCESSOR(ArrayLength)
- private:
- LOperand* temporary_;
+class LFixedArrayLength: public LUnaryOperation {
+ public:
+ explicit LFixedArrayLength(LOperand* input) : LUnaryOperation(input) { }
+
+ DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength, "fixed-array-length")
+ DECLARE_HYDROGEN_ACCESSOR(FixedArrayLength)
};
@@ -1256,6 +1251,18 @@
};
+class LLoadFunctionPrototype: public LUnaryOperation {
+ public:
+ explicit LLoadFunctionPrototype(LOperand* function)
+ : LUnaryOperation(function) { }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFunctionPrototype, "load-function-prototype")
+ DECLARE_HYDROGEN_ACCESSOR(LoadFunctionPrototype)
+
+ LOperand* function() const { return input(); }
+};
+
+
class LLoadElements: public LUnaryOperation {
public:
explicit LLoadElements(LOperand* obj) : LUnaryOperation(obj) { }
@@ -1655,21 +1662,25 @@
class LCheckPrototypeMaps: public LInstruction {
public:
- LCheckPrototypeMaps(LOperand* temp,
+ LCheckPrototypeMaps(LOperand* temp1,
+ LOperand* temp2,
Handle<JSObject> holder,
Handle<Map> receiver_map)
- : temp_(temp),
+ : temp1_(temp1),
+ temp2_(temp2),
holder_(holder),
receiver_map_(receiver_map) { }
DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
- LOperand* temp() const { return temp_; }
+ LOperand* temp1() const { return temp1_; }
+ LOperand* temp2() const { return temp2_; }
Handle<JSObject> holder() const { return holder_; }
Handle<Map> receiver_map() const { return receiver_map_; }
private:
- LOperand* temp_;
+ LOperand* temp1_;
+ LOperand* temp2_;
Handle<JSObject> holder_;
Handle<Map> receiver_map_;
};
@@ -2051,7 +2062,6 @@
LInstruction* Define(LInstruction* instr);
LInstruction* DefineAsRegister(LInstruction* instr);
LInstruction* DefineAsSpilled(LInstruction* instr, int index);
- LInstruction* DefineSameAsAny(LInstruction* instr);
LInstruction* DefineSameAsFirst(LInstruction* instr);
LInstruction* DefineFixed(LInstruction* instr, Register reg);
LInstruction* DefineFixedDouble(LInstruction* instr, DoubleRegister reg);
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index dfc4891..bb2461c 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -598,7 +598,7 @@
DoubleRegister dbl_scratch = d0;
LUnallocated marker_operand(LUnallocated::NONE);
- Register core_scratch = r9;
+ Register core_scratch = scratch0();
bool destroys_core_scratch = false;
LGapResolver resolver(move->move_operands(), &marker_operand);
@@ -730,7 +730,53 @@
void LCodeGen::DoCallStub(LCallStub* instr) {
- Abort("DoCallStub unimplemented.");
+ ASSERT(ToRegister(instr->result()).is(r0));
+ switch (instr->hydrogen()->major_key()) {
+ case CodeStub::RegExpConstructResult: {
+ RegExpConstructResultStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::RegExpExec: {
+ RegExpExecStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::SubString: {
+ SubStringStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::StringCharAt: {
+ Abort("StringCharAtStub unimplemented.");
+ break;
+ }
+ case CodeStub::MathPow: {
+ Abort("MathPowStub unimplemented.");
+ break;
+ }
+ case CodeStub::NumberToString: {
+ NumberToStringStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::StringAdd: {
+ StringAddStub stub(NO_STRING_ADD_FLAGS);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::StringCompare: {
+ StringCompareStub stub;
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ break;
+ }
+ case CodeStub::TranscendentalCache: {
+ Abort("TranscendentalCache unimplemented.");
+ break;
+ }
+ default:
+ UNREACHABLE();
+ }
}
@@ -750,8 +796,8 @@
void LCodeGen::DoMulI(LMulI* instr) {
+ Register scratch = scratch0();
Register left = ToRegister(instr->left());
- Register scratch = r9;
Register right = EmitLoadRegister(instr->right(), scratch);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
@@ -813,6 +859,7 @@
void LCodeGen::DoShiftI(LShiftI* instr) {
+ Register scratch = scratch0();
LOperand* left = instr->left();
LOperand* right = instr->right();
ASSERT(left->Equals(instr->result()));
@@ -820,21 +867,21 @@
Register result = ToRegister(left);
if (right->IsRegister()) {
// Mask the right operand.
- __ and_(r9, ToRegister(right), Operand(0x1F));
+ __ and_(scratch, ToRegister(right), Operand(0x1F));
switch (instr->op()) {
case Token::SAR:
- __ mov(result, Operand(result, ASR, r9));
+ __ mov(result, Operand(result, ASR, scratch));
break;
case Token::SHR:
if (instr->can_deopt()) {
- __ mov(result, Operand(result, LSR, r9), SetCC);
+ __ mov(result, Operand(result, LSR, scratch), SetCC);
DeoptimizeIf(mi, instr->environment());
} else {
- __ mov(result, Operand(result, LSR, r9));
+ __ mov(result, Operand(result, LSR, scratch));
}
break;
case Token::SHL:
- __ mov(result, Operand(result, LSL, r9));
+ __ mov(result, Operand(result, LSL, scratch));
break;
default:
UNREACHABLE();
@@ -898,24 +945,18 @@
}
-void LCodeGen::DoArrayLength(LArrayLength* instr) {
+void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Register result = ToRegister(instr->result());
+ Register array = ToRegister(instr->input());
+ __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
+}
- if (instr->hydrogen()->value()->IsLoadElements()) {
- // We load the length directly from the elements array.
- Register elements = ToRegister(instr->input());
- __ ldr(result, FieldMemOperand(elements, FixedArray::kLengthOffset));
- } else {
- // Check that the receiver really is an array.
- Register array = ToRegister(instr->input());
- Register temporary = ToRegister(instr->temporary());
- __ CompareObjectType(array, temporary, temporary, JS_ARRAY_TYPE);
- DeoptimizeIf(ne, instr->environment());
- // Load length directly from the array.
- __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
- }
- Abort("DoArrayLength untested.");
+void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
+ Register result = ToRegister(instr->result());
+ Register array = ToRegister(instr->input());
+ __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
+ Abort("DoFixedArrayLength untested.");
}
@@ -1065,11 +1106,10 @@
// Test for double values. Zero is false.
Label call_stub;
DoubleRegister dbl_scratch = d0;
- Register core_scratch = r9;
- ASSERT(!reg.is(core_scratch));
- __ ldr(core_scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
+ Register scratch = scratch0();
+ __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
- __ cmp(core_scratch, Operand(ip));
+ __ cmp(scratch, Operand(ip));
__ b(ne, &call_stub);
__ sub(ip, reg, Operand(kHeapObjectTag));
__ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
@@ -1176,11 +1216,41 @@
void LCodeGen::DoIsNull(LIsNull* instr) {
- Abort("DoIsNull unimplemented.");
+ Register reg = ToRegister(instr->input());
+ Register result = ToRegister(instr->result());
+
+ __ LoadRoot(ip, Heap::kNullValueRootIndex);
+ __ cmp(reg, ip);
+ if (instr->is_strict()) {
+ __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
+ __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
+ } else {
+ Label true_value, false_value, done;
+ __ b(eq, &true_value);
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ __ cmp(ip, reg);
+ __ b(eq, &true_value);
+ __ tst(reg, Operand(kSmiTagMask));
+ __ b(eq, &false_value);
+ // Check for undetectable objects by looking in the bit field in
+ // the map. The object has already been smi checked.
+ Register scratch = result;
+ __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
+ __ tst(scratch, Operand(1 << Map::kIsUndetectable));
+ __ b(ne, &true_value);
+ __ bind(&false_value);
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
+ __ jmp(&done);
+ __ bind(&true_value);
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
+ __ bind(&done);
+ }
}
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
+ Register scratch = scratch0();
Register reg = ToRegister(instr->input());
// TODO(fsc): If the expression is known to be a smi, then it's
@@ -1204,7 +1274,6 @@
__ b(eq, false_label);
// Check for undetectable objects by looking in the bit field in
// the map. The object has already been smi checked.
- Register scratch = ToRegister(instr->temp());
__ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
__ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
__ tst(scratch, Operand(1 << Map::kIsUndetectable));
@@ -1282,8 +1351,8 @@
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
+ Register scratch = scratch0();
Register input = ToRegister(instr->input());
- Register temp = ToRegister(instr->temp());
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1293,7 +1362,7 @@
__ tst(input, Operand(kSmiTagMask));
__ b(eq, false_label);
- __ CompareObjectType(input, temp, temp, instr->TestType());
+ __ CompareObjectType(input, scratch, scratch, instr->TestType());
EmitBranch(true_block, false_block, instr->BranchCondition());
}
@@ -1332,19 +1401,28 @@
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
- Abort("DoCmpMapAndBranch unimplemented.");
+ Register reg = ToRegister(instr->input());
+ Register temp = ToRegister(instr->temp());
+ int true_block = instr->true_block_id();
+ int false_block = instr->false_block_id();
+
+ __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ cmp(temp, Operand(instr->map()));
+ EmitBranch(true_block, false_block, eq);
}
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
- // We expect object and function in registers r1 and r0.
+ ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
+ ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
+
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Label true_value, done;
__ tst(r0, r0);
- __ mov(r0, Operand(Factory::false_value()), LeaveCC, eq);
- __ mov(r0, Operand(Factory::true_value()), LeaveCC, ne);
+ __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
+ __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
}
@@ -1432,7 +1510,14 @@
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
- Abort("DoLoadNamedField unimplemented.");
+ Register object = ToRegister(instr->input());
+ Register result = ToRegister(instr->result());
+ if (instr->hydrogen()->is_in_object()) {
+ __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
+ } else {
+ __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
+ __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
+ }
}
@@ -1447,6 +1532,50 @@
}
+void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
+ Register scratch = scratch0();
+ Register function = ToRegister(instr->function());
+ Register result = ToRegister(instr->result());
+
+ // Check that the function really is a function. Load map into the
+ // result register.
+ __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
+ DeoptimizeIf(ne, instr->environment());
+
+ // Make sure that the function has an instance prototype.
+ Label non_instance;
+ __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
+ __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
+ __ b(ne, &non_instance);
+
+ // Get the prototype or initial map from the function.
+ __ ldr(result,
+ FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Check that the function has a prototype or an initial map.
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(result, ip);
+ DeoptimizeIf(eq, instr->environment());
+
+ // If the function does not have an initial map, we're done.
+ Label done;
+ __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
+ __ b(ne, &done);
+
+ // Get the prototype from the initial map.
+ __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
+ __ jmp(&done);
+
+ // Non-instance prototype: Fetch prototype from constructor field
+ // in initial map.
+ __ bind(&non_instance);
+ __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
+
+ // All done.
+ __ bind(&done);
+}
+
+
void LCodeGen::DoLoadElements(LLoadElements* instr) {
Abort("DoLoadElements unimplemented.");
}
@@ -1544,7 +1673,9 @@
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
- Abort("DoCallConstantFunction unimplemented.");
+ ASSERT(ToRegister(instr->result()).is(r0));
+ __ mov(r1, Operand(instr->function()));
+ CallKnownFunction(instr->function(), instr->arity(), instr);
}
@@ -1604,7 +1735,13 @@
void LCodeGen::DoCallFunction(LCallFunction* instr) {
- Abort("DoCallFunction unimplemented.");
+ ASSERT(ToRegister(instr->result()).is(r0));
+
+ int arity = instr->arity();
+ CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ __ Drop(1);
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -1652,7 +1789,8 @@
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- Abort("DoBoundsCheck unimplemented.");
+ __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
+ DeoptimizeIf(hs, instr->environment());
}
@@ -1757,10 +1895,10 @@
};
DoubleRegister input_reg = ToDoubleRegister(instr->input());
+ Register scratch = scratch0();
Register reg = ToRegister(instr->result());
Register temp1 = ToRegister(instr->temp1());
Register temp2 = ToRegister(instr->temp2());
- Register scratch = r9;
DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
@@ -1808,8 +1946,7 @@
void LCodeGen::EmitNumberUntagD(Register input_reg,
DoubleRegister result_reg,
LEnvironment* env) {
- Register core_scratch = r9;
- ASSERT(!input_reg.is(core_scratch));
+ Register scratch = scratch0();
SwVfpRegister flt_scratch = s0;
ASSERT(!result_reg.is(d0));
@@ -1820,9 +1957,9 @@
__ b(eq, &load_smi);
// Heap number map check.
- __ ldr(core_scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
+ __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
- __ cmp(core_scratch, Operand(ip));
+ __ cmp(scratch, Operand(ip));
__ b(eq, &heap_number);
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
@@ -1864,16 +2001,15 @@
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Label done;
Register input_reg = ToRegister(instr->input());
- Register core_scratch = r9;
- ASSERT(!input_reg.is(core_scratch));
+ Register scratch = scratch0();
DoubleRegister dbl_scratch = d0;
SwVfpRegister flt_scratch = s0;
DoubleRegister dbl_tmp = ToDoubleRegister(instr->temp());
// Heap number map check.
- __ ldr(core_scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
+ __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
- __ cmp(core_scratch, Operand(ip));
+ __ cmp(scratch, Operand(ip));
if (instr->truncating()) {
Label heap_number;
@@ -1985,33 +2121,99 @@
void LCodeGen::DoCheckMap(LCheckMap* instr) {
+ Register scratch = scratch0();
LOperand* input = instr->input();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- __ ldr(r9, FieldMemOperand(reg, HeapObject::kMapOffset));
- __ cmp(r9, Operand(instr->hydrogen()->map()));
+ __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
+ __ cmp(scratch, Operand(instr->hydrogen()->map()));
DeoptimizeIf(ne, instr->environment());
}
void LCodeGen::LoadPrototype(Register result,
Handle<JSObject> prototype) {
- Abort("LoadPrototype unimplemented.");
+ if (Heap::InNewSpace(*prototype)) {
+ Handle<JSGlobalPropertyCell> cell =
+ Factory::NewJSGlobalPropertyCell(prototype);
+ __ mov(result, Operand(cell));
+ } else {
+ __ mov(result, Operand(prototype));
+ }
}
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- Abort("DoCheckPrototypeMaps unimplemented.");
+ Register temp1 = ToRegister(instr->temp1());
+ Register temp2 = ToRegister(instr->temp2());
+
+ Handle<JSObject> holder = instr->holder();
+ Handle<Map> receiver_map = instr->receiver_map();
+ Handle<JSObject> current_prototype(JSObject::cast(receiver_map->prototype()));
+
+ // Load prototype object.
+ LoadPrototype(temp1, current_prototype);
+
+ // Check prototype maps up to the holder.
+ while (!current_prototype.is_identical_to(holder)) {
+ __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
+ __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
+ DeoptimizeIf(ne, instr->environment());
+ current_prototype =
+ Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
+ // Load next prototype object.
+ LoadPrototype(temp1, current_prototype);
+ }
+
+ // Check the holder map.
+ __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
+ __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
+ DeoptimizeIf(ne, instr->environment());
}
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Abort("DoArrayLiteral unimplemented.");
+ __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+ __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
+ __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
+ __ Push(r3, r2, r1);
+
+ // Pick the right runtime function or stub to call.
+ int length = instr->hydrogen()->length();
+ if (instr->hydrogen()->IsCopyOnWrite()) {
+ ASSERT(instr->hydrogen()->depth() == 1);
+ FastCloneShallowArrayStub::Mode mode =
+ FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
+ FastCloneShallowArrayStub stub(mode, length);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ } else if (instr->hydrogen()->depth() > 1) {
+ CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
+ } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
+ } else {
+ FastCloneShallowArrayStub::Mode mode =
+ FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ FastCloneShallowArrayStub stub(mode, length);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ }
}
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
- Abort("DoObjectLiteral unimplemented.");
+ __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
+ __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
+ __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
+ __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
+ __ Push(r4, r3, r2, r1);
+
+ // Pick the right runtime function to call.
+ if (instr->hydrogen()->depth() > 1) {
+ CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
+ } else {
+ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
+ }
}
@@ -2056,8 +2258,7 @@
Register input,
Handle<String> type_name) {
Condition final_branch_condition = no_condition;
- Register core_scratch = r9;
- ASSERT(!input.is(core_scratch));
+ Register scratch = scratch0();
if (type_name->Equals(Heap::number_symbol())) {
__ tst(input, Operand(kSmiTagMask));
__ b(eq, true_label);
@@ -2073,7 +2274,7 @@
__ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
__ tst(ip, Operand(1 << Map::kIsUndetectable));
__ b(ne, false_label);
- __ CompareInstanceType(input, core_scratch, FIRST_NONSTRING_TYPE);
+ __ CompareInstanceType(input, scratch, FIRST_NONSTRING_TYPE);
final_branch_condition = lo;
} else if (type_name->Equals(Heap::boolean_symbol())) {
@@ -2099,10 +2300,10 @@
} else if (type_name->Equals(Heap::function_symbol())) {
__ tst(input, Operand(kSmiTagMask));
__ b(eq, false_label);
- __ CompareObjectType(input, input, core_scratch, JS_FUNCTION_TYPE);
+ __ CompareObjectType(input, input, scratch, JS_FUNCTION_TYPE);
__ b(eq, true_label);
// Regular expressions => 'function' (they are callable).
- __ CompareInstanceType(input, core_scratch, JS_REGEXP_TYPE);
+ __ CompareInstanceType(input, scratch, JS_REGEXP_TYPE);
final_branch_condition = eq;
} else if (type_name->Equals(Heap::object_symbol())) {
@@ -2112,16 +2313,16 @@
__ cmp(input, ip);
__ b(eq, true_label);
// Regular expressions => 'function', not 'object'.
- __ CompareObjectType(input, input, core_scratch, JS_REGEXP_TYPE);
+ __ CompareObjectType(input, input, scratch, JS_REGEXP_TYPE);
__ b(eq, false_label);
// Check for undetectable objects => false.
__ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
__ tst(ip, Operand(1 << Map::kIsUndetectable));
__ b(ne, false_label);
// Check for JS objects => true.
- __ CompareInstanceType(input, core_scratch, FIRST_JS_OBJECT_TYPE);
+ __ CompareInstanceType(input, scratch, FIRST_JS_OBJECT_TYPE);
__ b(lo, false_label);
- __ CompareInstanceType(input, core_scratch, LAST_JS_OBJECT_TYPE);
+ __ CompareInstanceType(input, scratch, LAST_JS_OBJECT_TYPE);
final_branch_condition = ls;
} else {
diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h
index 541a699..608efa9 100644
--- a/src/arm/lithium-codegen-arm.h
+++ b/src/arm/lithium-codegen-arm.h
@@ -103,6 +103,8 @@
HGraph* graph() const { return chunk_->graph(); }
MacroAssembler* masm() const { return masm_; }
+ Register scratch0() { return r9; }
+
int GetNextEmittedBlock(int block);
LInstruction* GetNextInstruction();