Version 3.20.5
Ensured that the length of frozen arrays is immutable (issues 2711, 259548)
Performance and stability improvements on all platforms.
git-svn-id: http://v8.googlecode.com/svn/trunk@15657 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index a07bbcd..8c90831 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -343,8 +343,7 @@
}
Label table_start;
__ bind(&table_start);
- Label needs_frame_not_call;
- Label needs_frame_is_call;
+ Label needs_frame;
for (int i = 0; i < deopt_jump_table_.length(); i++) {
__ bind(&deopt_jump_table_[i].label);
Address entry = deopt_jump_table_[i].address;
@@ -357,45 +356,24 @@
}
if (deopt_jump_table_[i].needs_frame) {
__ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry)));
- if (type == Deoptimizer::LAZY) {
- if (needs_frame_is_call.is_bound()) {
- __ b(&needs_frame_is_call);
- } else {
- __ bind(&needs_frame_is_call);
- __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
- // This variant of deopt can only be used with stubs. Since we don't
- // have a function pointer to install in the stack frame that we're
- // building, install a special marker there instead.
- ASSERT(info()->IsStub());
- __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
- __ push(scratch0());
- __ add(fp, sp, Operand(2 * kPointerSize));
- __ mov(lr, Operand(pc), LeaveCC, al);
- __ mov(pc, ip);
- }
+ if (needs_frame.is_bound()) {
+ __ b(&needs_frame);
} else {
- if (needs_frame_not_call.is_bound()) {
- __ b(&needs_frame_not_call);
- } else {
- __ bind(&needs_frame_not_call);
- __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
- // This variant of deopt can only be used with stubs. Since we don't
- // have a function pointer to install in the stack frame that we're
- // building, install a special marker there instead.
- ASSERT(info()->IsStub());
- __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
- __ push(scratch0());
- __ add(fp, sp, Operand(2 * kPointerSize));
- __ mov(pc, ip);
- }
+ __ bind(&needs_frame);
+ __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
+ // This variant of deopt can only be used with stubs. Since we don't
+ // have a function pointer to install in the stack frame that we're
+ // building, install a special marker there instead.
+ ASSERT(info()->IsStub());
+ __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
+ __ push(scratch0());
+ __ add(fp, sp, Operand(2 * kPointerSize));
+ __ mov(lr, Operand(pc), LeaveCC, al);
+ __ mov(pc, ip);
}
} else {
- if (type == Deoptimizer::LAZY) {
- __ mov(lr, Operand(pc), LeaveCC, al);
- __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
- } else {
- __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
- }
+ __ mov(lr, Operand(pc), LeaveCC, al);
+ __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
}
masm()->CheckConstPool(false, false);
}
@@ -803,13 +781,8 @@
}
ASSERT(info()->IsStub() || frame_is_built_);
- bool needs_lazy_deopt = info()->IsStub();
if (cc == al && frame_is_built_) {
- if (needs_lazy_deopt) {
- __ Call(entry, RelocInfo::RUNTIME_ENTRY);
- } else {
- __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
- }
+ __ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.