// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "arm/lithium-codegen-arm.h"
#include "arm/lithium-gap-resolver-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) const {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the generated code there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end =
        codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
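    // (This keeps the Deoptimizer::patch_size() bytes following the previous
    // safepoint clear, so a lazy-deoptimization patch written there cannot
    // overlap the call we are about to emit.)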
    if (call_end < prev_jump_end) {
      int padding_size = prev_jump_end - call_end;
      ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
      while (padding_size > 0) {
        codegen_->masm()->nop();
        padding_size -= Assembler::kInstrSize;
      }
    }
  }

  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateDeoptJumpTable() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). r5 is zero for method calls and non-zero for function
  // calls.
  if (info_->is_strict_mode()) {
    Label ok;
    __ cmp(r5, Operand(0));
    __ b(eq, &ok);
    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.
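  // The frame is now a standard JS frame: the caller's fp is saved at fp,
  // the return address at fp + kPointerSize, the context at
  // fp - kPointerSize, and the function at fp - 2 * kPointerSize.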

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both r0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Force constant pool emission at the end of the deferred code to make
  // sure that no constant pools are emitted after.
  masm()->CheckConstPool(true, false);

  return !is_aborted();
}


bool LCodeGen::GenerateDeoptJumpTable() {
  // Check that the jump table is accessible from everywhere in the function
  // code, i.e. that offsets to the table can be encoded in the 24-bit signed
  // immediate of a branch instruction.
  // To simplify we consider the code size from the first instruction to the
  // end of the jump table. We also don't consider the pc load delta.
  // Each entry in the jump table generates one instruction and inlines one
  // 32-bit data word after it.
  if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
      deopt_jump_table_.length() * 2)) {
    Abort("Generated code is too large");
  }
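  // (An ARM branch encodes a signed 24-bit offset counted in instructions,
  // i.e. a reach of about +/-32MB, so bounding the total instruction count
  // by 2^23 conservatively covers every branch into the table.)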

  // Block the constant pool emission during the jump table emission.
  __ BlockConstPoolFor(deopt_jump_table_.length());
  __ RecordComment("[ Deoptimisation jump table");
  Label table_start;
  __ bind(&table_start);
  for (int i = 0; i < deopt_jump_table_.length(); i++) {
    __ bind(&deopt_jump_table_[i].label);
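    // Reading pc on ARM yields the address of the current instruction plus
    // kPcLoadDelta (8), so the offset below makes the ldr target exactly the
    // 32-bit address word emitted by dd() right after it.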
    __ ldr(pc, MemOperand(pc, Assembler::kInstrSize - Assembler::kPcLoadDelta));
    __ dd(reinterpret_cast<uint32_t>(deopt_jump_table_[i].address));
  }
  ASSERT(masm()->InstructionsGeneratedSince(&table_start) ==
      deopt_jump_table_.length() * 2);
  __ RecordComment("]");

  // The deoptimization jump table is the last part of the instruction
  // sequence. Mark the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
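    // (For example, with kPointerSize == 4, slot index 0 maps to
    // MemOperand(fp, -12), the first word below those three fixed entries.)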
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, context,
    // and the first word of the double in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
  } else {
    // Incoming parameter. Skip the return address and the first word of
    // the double.
    return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
  }
}


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

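  // Recurse on the outer environment first so that frames appear in
  // outermost-to-innermost order in the translation.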
  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
                                          SafepointMode safepoint_mode) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution can
  // resume from a previous bailout point, repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(),
                    deoptimization_environment->deoptimization_index());
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(),
        0,
        deoptimization_environment->deoptimization_index());
  }
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4 0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------ translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (FLAG_trap_on_deopt) __ stop("trap_on_deopt", cc);

  if (cc == al) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (deopt_jump_table_.is_empty() ||
        (deopt_jump_table_.last().address != entry)) {
      deopt_jump_table_.Add(JumpTableEntry(entry));
    }
    __ b(cc, &deopt_jump_table_.last().label);
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
      deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
      deoptimization_index);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    if (divisor < 0) divisor = -divisor;

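    // Worked example: left = -5, divisor = 4: negate to 5, 5 & (4 - 1) == 1,
    // negate back to -1, which matches JavaScript's -5 % 4 == -1.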
    Label positive_dividend, done;
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    __ rsb(dividend, dividend, Operand(0));
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ rsb(dividend, dividend, Operand(0), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ b(ne, &done);
      DeoptimizeIf(al, instr->environment());
    } else {
      __ b(&done);
    }
    __ bind(&positive_dividend);
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(result.is(left));

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  __ b(lt, &vfp_modulo);

  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the (nonnegative) left hand side is smaller than the right hand
  // side, it is the result. Else try a few subtractions of the right
  // hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less than the right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value is preloaded before. Be careful that 'right' is only live
  // on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32-bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}


void LCodeGen::DoDivI(LDivI* instr) {
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);
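  // (Each tst above is itself conditional on the preceding cmp, so a
  // shortcut is taken only when the divisor matches and the division is
  // exact; all other cases fall through to the generic stub call below.)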

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged as Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}


template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  BinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->InputAt(0));
  Register right = EmitLoadRegister(instr->InputAt(1), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->InputAt(1)->IsConstantOperand()) {
    __ orr(ToRegister(instr->TempAt(0)), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(left, scratch, left, right);
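    // The product overflowed iff the high word (scratch) is not the sign
    // extension of the low word, which the next two instructions check.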
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ cmp(left, Operand(0));
    __ b(ne, &done);
    if (instr->InputAt(1)->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
        DeoptimizeIf(al, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Operand right_operand(no_reg);

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    right_operand = Operand(right_reg);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    right_operand = ToOperand(right);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), right_operand);
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), right_operand);
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), right_operand);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
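    // (JavaScript shift counts are taken modulo 32, hence the 0x1F mask.)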
Steve Block9fac8402011-05-12 15:51:54 +01001210 __ and_(scratch, ToRegister(right), Operand(0x1F));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001211 switch (instr->op()) {
1212 case Token::SAR:
Steve Block9fac8402011-05-12 15:51:54 +01001213 __ mov(result, Operand(result, ASR, scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001214 break;
1215 case Token::SHR:
1216 if (instr->can_deopt()) {
Steve Block9fac8402011-05-12 15:51:54 +01001217 __ mov(result, Operand(result, LSR, scratch), SetCC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001218 DeoptimizeIf(mi, instr->environment());
1219 } else {
Steve Block9fac8402011-05-12 15:51:54 +01001220 __ mov(result, Operand(result, LSR, scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001221 }
1222 break;
1223 case Token::SHL:
Steve Block9fac8402011-05-12 15:51:54 +01001224 __ mov(result, Operand(result, LSL, scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001225 break;
1226 default:
1227 UNREACHABLE();
1228 break;
1229 }
1230 } else {
1231 int value = ToInteger32(LConstantOperand::cast(right));
1232 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1233 switch (instr->op()) {
1234 case Token::SAR:
1235 if (shift_count != 0) {
1236 __ mov(result, Operand(result, ASR, shift_count));
1237 }
1238 break;
1239 case Token::SHR:
1240 if (shift_count == 0 && instr->can_deopt()) {
1241 __ tst(result, Operand(0x80000000));
1242 DeoptimizeIf(ne, instr->environment());
1243 } else {
1244 __ mov(result, Operand(result, LSR, shift_count));
1245 }
1246 break;
1247 case Token::SHL:
1248 if (shift_count != 0) {
1249 __ mov(result, Operand(result, LSL, shift_count));
1250 }
1251 break;
1252 default:
1253 UNREACHABLE();
1254 break;
1255 }
1256 }
1257}
1258
1259
1260void LCodeGen::DoSubI(LSubI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01001261 LOperand* left = instr->InputAt(0);
1262 LOperand* right = instr->InputAt(1);
1263 ASSERT(left->Equals(instr->result()));
1264 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1265 SBit set_cond = can_overflow ? SetCC : LeaveCC;
1266
1267 if (right->IsStackSlot() || right->IsArgument()) {
1268 Register right_reg = EmitLoadRegister(right, ip);
1269 __ sub(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1270 } else {
1271 ASSERT(right->IsRegister() || right->IsConstantOperand());
1272 __ sub(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1273 }
1274
1275 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001276 DeoptimizeIf(vs, instr->environment());
1277 }
1278}
1279
1280
1281void LCodeGen::DoConstantI(LConstantI* instr) {
1282 ASSERT(instr->result()->IsRegister());
1283 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1284}
1285
1286
1287void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001288 ASSERT(instr->result()->IsDoubleRegister());
1289 DwVfpRegister result = ToDoubleRegister(instr->result());
1290 double v = instr->value();
1291 __ vmov(result, v);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001292}
1293
1294
1295void LCodeGen::DoConstantT(LConstantT* instr) {
1296 ASSERT(instr->result()->IsRegister());
1297 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1298}
1299
1300
Steve Block9fac8402011-05-12 15:51:54 +01001301void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001302 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001303 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001304 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
1305}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001306
Ben Murdochb0fe1622011-05-05 13:52:32 +01001307
Steve Block44f0eee2011-05-26 01:26:41 +01001308void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001309 Register result = ToRegister(instr->result());
1310 Register array = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01001311 __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01001312}
1313
1314
Steve Block9fac8402011-05-12 15:51:54 +01001315void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1316 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001317 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001318 __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001319}
1320
1321
1322void LCodeGen::DoValueOf(LValueOf* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001323 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001324 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001325 Register map = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001326 ASSERT(input.is(result));
1327 Label done;
1328
1329 // If the object is a smi return the object.
1330 __ tst(input, Operand(kSmiTagMask));
1331 __ b(eq, &done);
1332
1333 // If the object is not a value type, return the object.
1334 __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
1335 __ b(ne, &done);
1336 __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));
1337
1338 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001339}
1340
1341
1342void LCodeGen::DoBitNotI(LBitNotI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001343 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001344 ASSERT(input->Equals(instr->result()));
1345 __ mvn(ToRegister(input), Operand(ToRegister(input)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001346}
1347
1348
1349void LCodeGen::DoThrow(LThrow* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001350 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001351 __ push(input_reg);
1352 CallRuntime(Runtime::kThrow, 1, instr);
1353
1354 if (FLAG_debug_code) {
1355 __ stop("Unreachable code.");
1356 }
1357}
1358
1359
1360void LCodeGen::DoAddI(LAddI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001361 LOperand* left = instr->InputAt(0);
1362 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001363 ASSERT(left->Equals(instr->result()));
Steve Block44f0eee2011-05-26 01:26:41 +01001364 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1365 SBit set_cond = can_overflow ? SetCC : LeaveCC;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001366
Steve Block44f0eee2011-05-26 01:26:41 +01001367 if (right->IsStackSlot() || right->IsArgument()) {
1368 Register right_reg = EmitLoadRegister(right, ip);
1369 __ add(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1370 } else {
1371 ASSERT(right->IsRegister() || right->IsConstantOperand());
1372 __ add(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1373 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001374
Steve Block44f0eee2011-05-26 01:26:41 +01001375 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001376 DeoptimizeIf(vs, instr->environment());
1377 }
1378}
1379
1380
1381void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001382 DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
1383 DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001384 switch (instr->op()) {
1385 case Token::ADD:
1386 __ vadd(left, left, right);
1387 break;
1388 case Token::SUB:
1389 __ vsub(left, left, right);
1390 break;
1391 case Token::MUL:
1392 __ vmul(left, left, right);
1393 break;
1394 case Token::DIV:
1395 __ vdiv(left, left, right);
1396 break;
1397 case Token::MOD: {
Steve Block1e0659c2011-05-24 12:43:12 +01001398 // Save r0-r3 on the stack.
1399 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
1400
Ben Murdoch257744e2011-11-30 15:57:28 +00001401 __ PrepareCallCFunction(0, 2, scratch0());
1402 __ SetCallCDoubleArguments(left, right);
Steve Block44f0eee2011-05-26 01:26:41 +01001403 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00001404 ExternalReference::double_fp_operation(Token::MOD, isolate()),
1405 0, 2);
Steve Block1e0659c2011-05-24 12:43:12 +01001406 // Move the result in the double result register.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001407 __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));
Steve Block1e0659c2011-05-24 12:43:12 +01001408
1409 // Restore r0-r3.
1410 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001411 break;
1412 }
1413 default:
1414 UNREACHABLE();
1415 break;
1416 }
1417}
1418
1419
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}


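// EmitBranch saves a jump whenever one successor is the block emitted next.
// A standalone sketch of the same decision, with plain integers standing in
// for block ids and the Emit* calls abstracted away:
enum BranchShape {
  kGotoOnly,        // both successors identical
  kInvertedBranch,  // branch on !cc to 'right', fall through to 'left'
  kStraightBranch,  // branch on cc to 'left', fall through to 'right'
  kBranchAndJump    // branch on cc to 'left', unconditional jump to 'right'
};
static BranchShape ClassifyBranchSketch(int left, int right, int next) {
  if (left == right) return kGotoOnly;
  if (left == next) return kInvertedBranch;
  if (right == next) return kStraightBranch;
  return kBranchAndJump;
}

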
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, ne);
    }
  }
}


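// The tagged slow path above hand-inlines ECMAScript ToBoolean for the
// common cases (undefined, true, false, smi zero, other smis, heap numbers)
// before falling back to ToBooleanStub. A standalone sketch of the
// heap-number step, which the VFPCompareAndLoadFlags test encodes: both
// zero (including -0) and NaN are falsy.
static bool HeapNumberIsTruthySketch(double value) {
  // 'value == value' is false only for NaN.
  return value == value && value != 0.0;
}

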
void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // If this goto needs a stack overflow check, perform it before jumping.
    if (deferred_stack_check != NULL) {
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}


void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = kNoCondition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


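// Why the signed/unsigned split matters: after an ARM 'cmp' the same flag
// bits answer both questions, but 'lt' reads N and V (two's complement)
// while 'lo' reads C (unsigned). A standalone sketch of the distinction:
static bool LessThanSketch(uint32_t a, uint32_t b, bool is_unsigned) {
  // 0xFFFFFFFF is 'greater' as an unsigned value but is -1, i.e. 'less',
  // when reinterpreted as signed.
  if (is_unsigned) return a < b;  // condition 'lo'
  return static_cast<int32_t>(a) < static_cast<int32_t>(b);  // 'lt'
}

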
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  Register scratch = scratch0();

  Label unordered, done;
  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to unordered to return false.
    __ b(vs, &unordered);
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ b(cc, &done);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to the false block label.
    __ b(vs, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoCmpSymbolEq(LCmpSymbolEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoCmpSymbolEqAndBranch(LCmpSymbolEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}


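// Both IsNull variants implement the difference between '=== null'
// (is_strict) and '== null': the non-strict form also accepts undefined and
// undetectable objects (e.g. document.all). A standalone sketch over an
// enum that stands in for the tag and map checks above:
enum NullTestInput { kNullValue, kUndefinedValue, kSmiValue,
                     kUndetectableObject, kOtherValue };
static bool IsNullSketch(NullTestInput v, bool is_strict) {
  if (v == kNullValue) return true;
  if (is_strict) return false;
  return v == kUndefinedValue || v == kUndetectableObject;
}

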
1733void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001734 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001735 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001736
1737 // TODO(fsc): If the expression is known to be a smi, then it's
1738 // definitely not null. Jump to the false block.
1739
1740 int true_block = chunk_->LookupDestination(instr->true_block_id());
1741 int false_block = chunk_->LookupDestination(instr->false_block_id());
1742
1743 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1744 __ cmp(reg, ip);
1745 if (instr->is_strict()) {
1746 EmitBranch(true_block, false_block, eq);
1747 } else {
1748 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1749 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1750 __ b(eq, true_label);
1751 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1752 __ cmp(reg, ip);
1753 __ b(eq, true_label);
1754 __ tst(reg, Operand(kSmiTagMask));
1755 __ b(eq, false_label);
1756 // Check for undetectable objects by looking in the bit field in
1757 // the map. The object has already been smi checked.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001758 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1759 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1760 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1761 EmitBranch(true_block, false_block, ne);
1762 }
1763}
1764
1765
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  __ LoadRoot(temp1, Heap::kNullValueRootIndex);
  __ cmp(input, temp1);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_JS_OBJECT_TYPE));
  return le;
}


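// The object test above ends with a two-sided range check on the instance
// type. For reference, the same interval test can also be done with a
// single unsigned comparison, the usual trick behind one-branch type range
// checks elsewhere in the code base; a standalone sketch:
static bool InObjectTypeRangeSketch(unsigned type,
                                    unsigned first, unsigned last) {
  // Types below 'first' wrap around to large unsigned values, so one
  // compare covers both bounds.
  return (type - first) <= (last - first);
}

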
1793void LCodeGen::DoIsObject(LIsObject* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001794 Register reg = ToRegister(instr->InputAt(0));
1795 Register result = ToRegister(instr->result());
1796 Register temp = scratch0();
1797 Label is_false, is_true, done;
1798
1799 Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
1800 __ b(true_cond, &is_true);
1801
1802 __ bind(&is_false);
1803 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1804 __ b(&done);
1805
1806 __ bind(&is_true);
1807 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1808
1809 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001810}
1811
1812
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Register result = ToRegister(instr->result());
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  Label done;
  __ b(eq, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}


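// Both smi tests are a single 'tst' because, on 32-bit V8, small integers
// carry a clear low tag bit while heap pointers have it set; kSmiTagMask
// selects that bit. Standalone sketch of the predicate behind the 'eq'
// branch:
static bool IsSmiSketch(uint32_t tagged_word) {
  const uint32_t kLowTagBit = 1;  // stands in for kSmiTagMask
  return (tagged_word & kLowTagBit) == 0;
}

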
void LCodeGen::DoIsUndetectable(LIsUndetectable* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Label false_label, done;
  __ JumpIfSmi(input, &false_label);
  __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(result, Operand(1 << Map::kIsUndetectable));
  __ b(eq, &false_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(&false_label);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
  __ tst(temp, Operand(1 << Map::kIsUndetectable));
  EmitBranch(true_block, false_block, ne);
}


static InstanceType TestType(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Label done;
  __ tst(input, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kFalseValueRootIndex, eq);
  __ b(eq, &done);
  __ CompareObjectType(input, result, result, TestType(instr->hydrogen()));
  Condition cond = BranchCondition(instr->hydrogen());
  __ LoadRoot(result, Heap::kTrueValueRootIndex, cond);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, NegateCondition(cond));
  __ bind(&done);
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, false_label);

  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq);
}


// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, is_false);
  __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ b(eq, is_true);
  } else {
    __ b(eq, is_false);
  }

  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}


void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Handle<String> class_name = instr->hydrogen()->class_name();

  Label done, is_true, is_false;

  EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
  __ b(ne, &is_false);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}


void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A Smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  __ mov(ip, Operand(factory()->the_hole_value()));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(factory()->the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not an instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values are never instances of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be r4
  // as its slot in the pushed safepoint register area is used to communicate
  // the offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}


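// The deferred path communicates the inline cache site back to the stub by
// storing an instruction-count delta into r4's safepoint slot; the stub can
// then locate, and later patch, the map-check sequence relative to the call
// site. A standalone sketch of the delta arithmetic (kAdditionalDelta is
// this code's value for the instructions between 'before_push_delta' and
// the stub call, not a universal constant):
static int MapCheckByteOffsetSketch(int instructions_since_map_check,
                                    int additional_delta) {
  const int kInstrSizeSketch = 4;  // every ARM instruction is four bytes
  return (instructions_since_map_check + additional_delta) *
         kInstrSizeSketch;
}

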
static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}


void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  // The compare stub expects compare condition and the input operands
  // reversed for GT and LTE.
  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, condition);
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}


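// Frame teardown arithmetic: the epilogue above pops the saved fp/lr pair
// with the ldm, then drops the receiver plus the declared parameters in a
// single sp adjustment. For a two-parameter function on 32-bit ARM that is
// (2 + 1) * 4 = 12 bytes. Standalone sketch:
static int32_t ReturnSpDeltaSketch(int parameter_count) {
  const int32_t kPointerSizeSketch = 4;  // 32-bit ARM
  return (parameter_count + 1) * kPointerSizeSketch;  // +1 for the receiver
}

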
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  __ mov(r2, Operand(instr->name()));
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(context, instr->slot_index()));
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  __ str(value, ContextOperand(context, instr->slot_index()));
  if (instr->needs_write_barrier()) {
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWrite(context, Operand(offset), value, scratch0());
  }
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
      __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    LoadHeapObject(result, Handle<HeapObject>::cast(function));
  }
}


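// Descriptor field indices are signed: negative values address in-object
// slots relative to the end of the object's fixed part, non-negative ones
// index the out-of-line properties array. Standalone sketch of the offset
// arithmetic (ignoring the heap-object tag that FieldMemOperand handles):
static int FieldOffsetSketch(int index, int instance_size, int header_size) {
  const int kPointerSizeSketch = 4;  // 32-bit ARM
  return index < 0
      ? instance_size + index * kPointerSizeSketch  // in-object slot
      : header_size + index * kPointerSizeSketch;   // properties array slot
}

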
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}


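// The polymorphic load compiles to a linear chain of map checks, with a
// final map handled by either a generic IC call or a deoptimization.
// Standalone sketch of the dispatch structure, with opaque pointers
// standing in for maps:
static int PolymorphicDispatchSketch(const void* map,
                                     const void* const* known_maps,
                                     int map_count) {
  for (int i = 0; i < map_count; ++i) {
    if (map == known_maps[i]) return i;  // inlined fast-path load
  }
  return -1;  // generic IC call or deoptimization, per need_generic()
}

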
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ sub(scratch, scratch, Operand(FIRST_EXTERNAL_ARRAY_TYPE));
    __ cmp(scratch, Operand(kExternalArrayTypeCount));
    __ Check(cc, "Check for fast or external elements failed.");
    __ bind(&done);
  }
}


void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ ldr(to_reg, FieldMemOperand(from_reg,
                                 ExternalArray::kExternalPointerOffset));
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bail out if index is not a valid argument index. The unsigned check
  // handles the negative-index case for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them; add one more.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}


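// The bounds check above folds 'index < 0' and 'index >= length' into one
// unsigned test: after 'sub ... SetCC', the 'ls' deopt fires when
// length - index borrows or is zero. Standalone sketch of the predicate:
static bool ArgumentIndexValidSketch(int32_t length, int32_t index) {
  // A negative index reinterpreted as unsigned becomes huge, so the single
  // unsigned comparison rejects it as well; valid means 0 <= index < length.
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
}

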
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  ASSERT(result.is(elements));

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ cmp(result, scratch);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  ExternalArrayType array_type = instr->array_type();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ExternalArrayTypeToShiftSize(array_type);

  if (array_type == kExternalFloatArray || array_type == kExternalDoubleArray) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result(ToDoubleRegister(instr->result()));
    Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
                                    : Operand(key, LSL, shift_size));
    __ add(scratch0(), external_pointer, operand);
    if (array_type == kExternalFloatArray) {
      __ vldr(result.low(), scratch0(), 0);
      __ vcvt_f64_f32(result, result.low());
    } else {  // i.e. array_type == kExternalDoubleArray
      __ vldr(result, scratch0(), 0);
    }
  } else {
    Register result(ToRegister(instr->result()));
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (array_type) {
      case kExternalByteArray:
        __ ldrsb(result, mem_operand);
        break;
      case kExternalUnsignedByteArray:
      case kExternalPixelArray:
        __ ldrb(result, mem_operand);
        break;
      case kExternalShortArray:
        __ ldrsh(result, mem_operand);
        break;
      case kExternalUnsignedShortArray:
        __ ldrh(result, mem_operand);
        break;
      case kExternalIntArray:
        __ ldr(result, mem_operand);
        break;
      case kExternalUnsignedIntArray:
        __ ldr(result, mem_operand);
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case kExternalFloatArray:
      case kExternalDoubleArray:
        UNREACHABLE();
        break;
    }
  }
}


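// Element addresses in external (typed) arrays are base + (key << shift),
// where the shift encodes the element width: 0 for byte arrays, 1 for
// shorts, 2 for ints and floats, 3 for doubles. Standalone sketch of the
// offset both the constant-key and register-key paths compute:
static int ExternalElementOffsetSketch(int key, int shift_size) {
  return key * (1 << shift_size);  // same value the LSL form produces
}

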
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  Label done, adapted;
  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ mov(result, fp, LeaveCC, ne);
  __ mov(result, scratch, LeaveCC, eq);
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If there is no arguments adaptor frame, the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}


2710void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002711 Register receiver = ToRegister(instr->receiver());
2712 Register function = ToRegister(instr->function());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002713 Register length = ToRegister(instr->length());
2714 Register elements = ToRegister(instr->elements());
Steve Block1e0659c2011-05-24 12:43:12 +01002715 Register scratch = scratch0();
2716 ASSERT(receiver.is(r0)); // Used for parameter count.
2717 ASSERT(function.is(r1)); // Required by InvokeFunction.
2718 ASSERT(ToRegister(instr->result()).is(r0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002719
Ben Murdoch257744e2011-11-30 15:57:28 +00002720 // TODO(1412): This is not correct if the called function is a
2721 // strict mode function or a native.
2722 //
Steve Block1e0659c2011-05-24 12:43:12 +01002723 // If the receiver is null or undefined, we have to pass the global object
2724 // as a receiver.
2725 Label global_object, receiver_ok;
2726 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
2727 __ cmp(receiver, scratch);
2728 __ b(eq, &global_object);
2729 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
2730 __ cmp(receiver, scratch);
2731 __ b(eq, &global_object);
2732
2733 // Deoptimize if the receiver is not a JS object.
2734 __ tst(receiver, Operand(kSmiTagMask));
2735 DeoptimizeIf(eq, instr->environment());
2736 __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
2737 DeoptimizeIf(lo, instr->environment());
2738 __ jmp(&receiver_ok);
2739
2740 __ bind(&global_object);
2741 __ ldr(receiver, GlobalObjectOperand());
Ben Murdoch257744e2011-11-30 15:57:28 +00002742 __ ldr(receiver,
2743 FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002744 __ bind(&receiver_ok);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002745
2746 // Copy the arguments to this function possibly from the
2747 // adaptor frame below it.
2748 const uint32_t kArgumentsLimit = 1 * KB;
2749 __ cmp(length, Operand(kArgumentsLimit));
2750 DeoptimizeIf(hi, instr->environment());
2751
2752 // Push the receiver and use the register to keep the original
2753 // number of arguments.
2754 __ push(receiver);
2755 __ mov(receiver, length);
2756  // The arguments are located at a one-pointer-size offset from elements.
2757 __ add(elements, elements, Operand(1 * kPointerSize));
2758
2759 // Loop through the arguments pushing them onto the execution
2760 // stack.
Steve Block1e0659c2011-05-24 12:43:12 +01002761 Label invoke, loop;
Ben Murdochb8e0da22011-05-16 14:20:40 +01002762 // length is a small non-negative integer, due to the test above.
Steve Block44f0eee2011-05-26 01:26:41 +01002763 __ cmp(length, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002764 __ b(eq, &invoke);
2765 __ bind(&loop);
2766 __ ldr(scratch, MemOperand(elements, length, LSL, 2));
2767 __ push(scratch);
2768 __ sub(length, length, Operand(1), SetCC);
2769 __ b(ne, &loop);
2770
2771 __ bind(&invoke);
Steve Block1e0659c2011-05-24 12:43:12 +01002772 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2773 LPointerMap* pointers = instr->pointer_map();
2774 LEnvironment* env = instr->deoptimization_environment();
2775 RecordPosition(pointers->position());
2776 RegisterEnvironmentForDeoptimization(env);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002777 SafepointGenerator safepoint_generator(this,
Steve Block1e0659c2011-05-24 12:43:12 +01002778 pointers,
2779 env->deoptimization_index());
2780 // The number of arguments is stored in receiver which is r0, as expected
2781 // by InvokeFunction.
2782 v8::internal::ParameterCount actual(receiver);
Ben Murdoch257744e2011-11-30 15:57:28 +00002783 __ InvokeFunction(function, actual, CALL_FUNCTION,
2784 safepoint_generator, CALL_AS_METHOD);
Steve Block1e0659c2011-05-24 12:43:12 +01002785 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002786}
2787
2788
2789void LCodeGen::DoPushArgument(LPushArgument* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002790 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002791 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2792 Abort("DoPushArgument not implemented for double type.");
2793 } else {
2794 Register argument_reg = EmitLoadRegister(argument, ip);
2795 __ push(argument_reg);
2796 }
2797}
2798
2799
Steve Block1e0659c2011-05-24 12:43:12 +01002800void LCodeGen::DoContext(LContext* instr) {
2801 Register result = ToRegister(instr->result());
2802 __ mov(result, cp);
2803}
2804
2805
2806void LCodeGen::DoOuterContext(LOuterContext* instr) {
2807 Register context = ToRegister(instr->context());
2808 Register result = ToRegister(instr->result());
2809 __ ldr(result,
2810 MemOperand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2811 __ ldr(result, FieldMemOperand(result, JSFunction::kContextOffset));
2812}
2813
2814
Ben Murdochb0fe1622011-05-05 13:52:32 +01002815void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002816 Register context = ToRegister(instr->context());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002817 Register result = ToRegister(instr->result());
2818 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2819}
2820
2821
2822void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002823 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002824 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002825 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002826}
2827
2828
2829void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2830 int arity,
Ben Murdoch257744e2011-11-30 15:57:28 +00002831 LInstruction* instr,
2832 CallKind call_kind) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002833 // Change context if needed.
2834 bool change_context =
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002835 (info()->closure()->context() != function->context()) ||
Ben Murdochb0fe1622011-05-05 13:52:32 +01002836 scope()->contains_with() ||
2837 (scope()->num_heap_slots() > 0);
2838 if (change_context) {
2839 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2840 }
2841
2842  // Set r0 to the arguments count if adaptation is not needed. Assumes that r0
2843 // is available to write to at this point.
2844 if (!function->NeedsArgumentsAdaption()) {
2845 __ mov(r0, Operand(arity));
2846 }
2847
2848 LPointerMap* pointers = instr->pointer_map();
2849 RecordPosition(pointers->position());
2850
2851 // Invoke function.
Ben Murdoch257744e2011-11-30 15:57:28 +00002852 __ SetCallKind(r5, call_kind);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002853 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2854 __ Call(ip);
2855
2856  // Set up deoptimization.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002857 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002858
2859 // Restore context.
2860 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2861}
2862
2863
2864void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002865 ASSERT(ToRegister(instr->result()).is(r0));
2866 __ mov(r1, Operand(instr->function()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002867 CallKnownFunction(instr->function(),
2868 instr->arity(),
2869 instr,
2870 CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002871}
2872
2873
2874void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002875 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2876 Register input = ToRegister(instr->InputAt(0));
2877 Register scratch = scratch0();
2878
2879 // Deoptimize if not a heap number.
2880 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2881 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
2882 __ cmp(scratch, Operand(ip));
2883 DeoptimizeIf(ne, instr->environment());
2884
2885 Label done;
2886 Register exponent = scratch0();
2887 scratch = no_reg;
2888 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
2889 // Check the sign of the argument. If the argument is positive, just
2890 // return it. We do not need to patch the stack since |input| and
2891 // |result| are the same register and |input| would be restored
2892 // unchanged by popping safepoint registers.
2893 __ tst(exponent, Operand(HeapNumber::kSignMask));
2894 __ b(eq, &done);
2895
2896 // Input is negative. Reverse its sign.
2897 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002898 {
2899 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01002900
Ben Murdoch8b112d22011-06-08 16:22:53 +01002901 // Registers were saved at the safepoint, so we can use
2902 // many scratch registers.
2903 Register tmp1 = input.is(r1) ? r0 : r1;
2904 Register tmp2 = input.is(r2) ? r0 : r2;
2905 Register tmp3 = input.is(r3) ? r0 : r3;
2906 Register tmp4 = input.is(r4) ? r0 : r4;
Steve Block1e0659c2011-05-24 12:43:12 +01002907
Ben Murdoch8b112d22011-06-08 16:22:53 +01002908 // exponent: floating point exponent value.
Steve Block1e0659c2011-05-24 12:43:12 +01002909
Ben Murdoch8b112d22011-06-08 16:22:53 +01002910 Label allocated, slow;
2911 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
2912 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
2913 __ b(&allocated);
Steve Block1e0659c2011-05-24 12:43:12 +01002914
Ben Murdoch8b112d22011-06-08 16:22:53 +01002915 // Slow case: Call the runtime system to do the number allocation.
2916 __ bind(&slow);
Steve Block1e0659c2011-05-24 12:43:12 +01002917
Ben Murdoch8b112d22011-06-08 16:22:53 +01002918 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2919    // Set the pointer to the new heap number in tmp1.
2920 if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
2921 // Restore input_reg after call to runtime.
2922 __ LoadFromSafepointRegisterSlot(input, input);
2923 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002924
Ben Murdoch8b112d22011-06-08 16:22:53 +01002925 __ bind(&allocated);
2926 // exponent: floating point exponent value.
2927 // tmp1: allocated heap number.
2928 __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
2929 __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
2930 __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
2931 __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002932
Ben Murdoch8b112d22011-06-08 16:22:53 +01002933 __ StoreToSafepointRegisterSlot(tmp1, input);
2934 }
Steve Block1e0659c2011-05-24 12:43:12 +01002935
2936 __ bind(&done);
2937}
2938
2939
2940void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2941 Register input = ToRegister(instr->InputAt(0));
2942 __ cmp(input, Operand(0));
2943 // We can make rsb conditional because the previous cmp instruction
2944 // will clear the V (overflow) flag and rsb won't set this flag
2945 // if input is positive.
2946 __ rsb(input, input, Operand(0), SetCC, mi);
2947 // Deoptimize on overflow.
2948 DeoptimizeIf(vs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002949}
2950
2951
2952void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002953 // Class for deferred case.
2954 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2955 public:
2956 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2957 LUnaryMathOperation* instr)
2958 : LDeferredCode(codegen), instr_(instr) { }
2959 virtual void Generate() {
2960 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2961 }
2962 private:
2963 LUnaryMathOperation* instr_;
2964 };
2965
2966 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2967 Representation r = instr->hydrogen()->value()->representation();
2968 if (r.IsDouble()) {
2969 DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
2970 __ vabs(input, input);
2971 } else if (r.IsInteger32()) {
2972 EmitIntegerMathAbs(instr);
2973 } else {
2974 // Representation is tagged.
2975 DeferredMathAbsTaggedHeapNumber* deferred =
2976 new DeferredMathAbsTaggedHeapNumber(this, instr);
2977 Register input = ToRegister(instr->InputAt(0));
2978 // Smi check.
2979 __ JumpIfNotSmi(input, deferred->entry());
2980 // If smi, handle it directly.
2981 EmitIntegerMathAbs(instr);
2982 __ bind(deferred->exit());
2983 }
2984}
2985
2986
Ben Murdochb0fe1622011-05-05 13:52:32 +01002987void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002988 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002989 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002990 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01002991 Register scratch1 = scratch0();
2992 Register scratch2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002993
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002994 __ EmitVFPTruncate(kRoundToMinusInf,
2995 single_scratch,
2996 input,
2997 scratch1,
2998 scratch2);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002999 DeoptimizeIf(ne, instr->environment());
3000
3001  // Move the result back to the general-purpose result register.
3002 __ vmov(result, single_scratch);
3003
Steve Block44f0eee2011-05-26 01:26:41 +01003004 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3005 // Test for -0.
3006 Label done;
3007 __ cmp(result, Operand(0));
3008 __ b(ne, &done);
3009 __ vmov(scratch1, input.high());
3010 __ tst(scratch1, Operand(HeapNumber::kSignMask));
3011 DeoptimizeIf(ne, instr->environment());
3012 __ bind(&done);
3013 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003014}
3015
3016
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003017void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
3018 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
3019 Register result = ToRegister(instr->result());
Ben Murdoch257744e2011-11-30 15:57:28 +00003020 Register scratch1 = result;
3021 Register scratch2 = scratch0();
3022 Label done, check_sign_on_zero;
3023
3024 // Extract exponent bits.
3025 __ vmov(scratch1, input.high());
3026 __ ubfx(scratch2,
3027 scratch1,
3028 HeapNumber::kExponentShift,
3029 HeapNumber::kExponentBits);
3030
3031 // If the number is in ]-0.5, +0.5[, the result is +/- 0.
3032 __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
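  // A biased exponent of at most kExponentBias - 2 means an unbiased exponent
  // of at most -2, i.e. |input| < 2^-1 = 0.5.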
3033 __ mov(result, Operand(0), LeaveCC, le);
3034 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3035 __ b(le, &check_sign_on_zero);
3036 } else {
3037 __ b(le, &done);
3038 }
3039
3040 // The following conversion will not work with numbers
3041 // outside of ]-2^32, 2^32[.
3042 __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
3043 DeoptimizeIf(ge, instr->environment());
3044
3045 // Save the original sign for later comparison.
3046 __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));
3047
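  // Round to nearest by computing floor(input + 0.5): add 0.5 here and
  // truncate toward minus infinity below.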
3048 __ vmov(double_scratch0(), 0.5);
3049 __ vadd(input, input, double_scratch0());
3050
3051 // Check sign of the result: if the sign changed, the input
3052  // value was in ]-0.5, 0[ and the result should be -0.
3053 __ vmov(scratch1, input.high());
3054 __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
3055 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3056 DeoptimizeIf(mi, instr->environment());
3057 } else {
3058 __ mov(result, Operand(0), LeaveCC, mi);
3059 __ b(mi, &done);
3060 }
3061
3062 __ EmitVFPTruncate(kRoundToMinusInf,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003063 double_scratch0().low(),
3064 input,
3065 scratch1,
3066 scratch2);
3067 DeoptimizeIf(ne, instr->environment());
3068 __ vmov(result, double_scratch0().low());
3069
Steve Block44f0eee2011-05-26 01:26:41 +01003070 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3071 // Test for -0.
Steve Block44f0eee2011-05-26 01:26:41 +01003072 __ cmp(result, Operand(0));
3073 __ b(ne, &done);
Ben Murdoch257744e2011-11-30 15:57:28 +00003074 __ bind(&check_sign_on_zero);
Steve Block44f0eee2011-05-26 01:26:41 +01003075 __ vmov(scratch1, input.high());
3076 __ tst(scratch1, Operand(HeapNumber::kSignMask));
3077 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003078 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003079 __ bind(&done);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003080}
3081
3082
Ben Murdochb0fe1622011-05-05 13:52:32 +01003083void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003084 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01003085 ASSERT(ToDoubleRegister(instr->result()).is(input));
3086 __ vsqrt(input, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003087}
3088
3089
Steve Block44f0eee2011-05-26 01:26:41 +01003090void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3091 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
3092 Register scratch = scratch0();
3093 SwVfpRegister single_scratch = double_scratch0().low();
3094 DoubleRegister double_scratch = double_scratch0();
3095 ASSERT(ToDoubleRegister(instr->result()).is(input));
3096
3097 // Add +0 to convert -0 to +0.
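  // (vsqrt preserves the sign of a zero input, but -0 ** 0.5 must be +0.)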
3098 __ mov(scratch, Operand(0));
3099 __ vmov(single_scratch, scratch);
3100 __ vcvt_f64_s32(double_scratch, single_scratch);
3101 __ vadd(input, input, double_scratch);
3102 __ vsqrt(input, input);
3103}
3104
3105
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003106void LCodeGen::DoPower(LPower* instr) {
3107 LOperand* left = instr->InputAt(0);
3108 LOperand* right = instr->InputAt(1);
3109 Register scratch = scratch0();
3110 DoubleRegister result_reg = ToDoubleRegister(instr->result());
3111 Representation exponent_type = instr->hydrogen()->right()->representation();
3112 if (exponent_type.IsDouble()) {
3113 // Prepare arguments and call C function.
Ben Murdoch257744e2011-11-30 15:57:28 +00003114 __ PrepareCallCFunction(0, 2, scratch);
3115 __ SetCallCDoubleArguments(ToDoubleRegister(left),
3116 ToDoubleRegister(right));
Steve Block44f0eee2011-05-26 01:26:41 +01003117 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00003118 ExternalReference::power_double_double_function(isolate()), 0, 2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003119 } else if (exponent_type.IsInteger32()) {
3120 ASSERT(ToRegister(right).is(r0));
3121 // Prepare arguments and call C function.
Ben Murdoch257744e2011-11-30 15:57:28 +00003122 __ PrepareCallCFunction(1, 1, scratch);
3123 __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
Steve Block44f0eee2011-05-26 01:26:41 +01003124 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00003125 ExternalReference::power_double_int_function(isolate()), 1, 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003126 } else {
3127 ASSERT(exponent_type.IsTagged());
3128 ASSERT(instr->hydrogen()->left()->representation().IsDouble());
3129
3130 Register right_reg = ToRegister(right);
3131
3132 // Check for smi on the right hand side.
3133 Label non_smi, call;
3134 __ JumpIfNotSmi(right_reg, &non_smi);
3135
3136 // Untag smi and convert it to a double.
3137 __ SmiUntag(right_reg);
3138 SwVfpRegister single_scratch = double_scratch0().low();
3139 __ vmov(single_scratch, right_reg);
3140 __ vcvt_f64_s32(result_reg, single_scratch);
3141 __ jmp(&call);
3142
3143 // Heap number map check.
3144 __ bind(&non_smi);
3145 __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
3146 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3147 __ cmp(scratch, Operand(ip));
3148 DeoptimizeIf(ne, instr->environment());
3149 int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
3150 __ add(scratch, right_reg, Operand(value_offset));
3151 __ vldr(result_reg, scratch, 0);
3152
3153 // Prepare arguments and call C function.
3154 __ bind(&call);
Ben Murdoch257744e2011-11-30 15:57:28 +00003155 __ PrepareCallCFunction(0, 2, scratch);
3156 __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01003157 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00003158 ExternalReference::power_double_double_function(isolate()), 0, 2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003159 }
3160 // Store the result in the result register.
3161 __ GetCFunctionDoubleResult(result_reg);
3162}
3163
3164
3165void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3166 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3167 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3168 TranscendentalCacheStub::UNTAGGED);
3169 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3170}
3171
3172
3173void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3174 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3175 TranscendentalCacheStub stub(TranscendentalCache::COS,
3176 TranscendentalCacheStub::UNTAGGED);
3177 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3178}
3179
3180
3181void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3182 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3183 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3184 TranscendentalCacheStub::UNTAGGED);
3185 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3186}
3187
3188
Ben Murdochb0fe1622011-05-05 13:52:32 +01003189void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3190 switch (instr->op()) {
3191 case kMathAbs:
3192 DoMathAbs(instr);
3193 break;
3194 case kMathFloor:
3195 DoMathFloor(instr);
3196 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003197 case kMathRound:
3198 DoMathRound(instr);
3199 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003200 case kMathSqrt:
3201 DoMathSqrt(instr);
3202 break;
Steve Block44f0eee2011-05-26 01:26:41 +01003203 case kMathPowHalf:
3204 DoMathPowHalf(instr);
3205 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003206 case kMathCos:
3207 DoMathCos(instr);
3208 break;
3209 case kMathSin:
3210 DoMathSin(instr);
3211 break;
3212 case kMathLog:
3213 DoMathLog(instr);
3214 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003215 default:
3216 Abort("Unimplemented type of LUnaryMathOperation.");
3217 UNREACHABLE();
3218 }
3219}
3220
3221
Ben Murdoch257744e2011-11-30 15:57:28 +00003222void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3223 ASSERT(ToRegister(instr->function()).is(r1));
3224 ASSERT(instr->HasPointerMap());
3225 ASSERT(instr->HasDeoptimizationEnvironment());
3226 LPointerMap* pointers = instr->pointer_map();
3227 LEnvironment* env = instr->deoptimization_environment();
3228 RecordPosition(pointers->position());
3229 RegisterEnvironmentForDeoptimization(env);
3230 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3231 ParameterCount count(instr->arity());
3232 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3233 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3234}
3235
3236
Ben Murdochb0fe1622011-05-05 13:52:32 +01003237void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003238 ASSERT(ToRegister(instr->result()).is(r0));
3239
3240 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003241 Handle<Code> ic =
3242 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003243 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3244 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003245}
3246
3247
3248void LCodeGen::DoCallNamed(LCallNamed* instr) {
3249 ASSERT(ToRegister(instr->result()).is(r0));
3250
3251 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003252 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3253 Handle<Code> ic =
3254 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003255 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003256 CallCode(ic, mode, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003257 // Restore context register.
3258 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3259}
3260
3261
3262void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003263 ASSERT(ToRegister(instr->result()).is(r0));
3264
3265 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003266 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
Steve Block9fac8402011-05-12 15:51:54 +01003267 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3268 __ Drop(1);
3269 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003270}
3271
3272
3273void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003274 ASSERT(ToRegister(instr->result()).is(r0));
3275
3276 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003277 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
Steve Block44f0eee2011-05-26 01:26:41 +01003278 Handle<Code> ic =
Ben Murdoch257744e2011-11-30 15:57:28 +00003279 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003280 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003281 CallCode(ic, mode, instr);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003282 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003283}
3284
3285
3286void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3287 ASSERT(ToRegister(instr->result()).is(r0));
3288 __ mov(r1, Operand(instr->target()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003289 CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003290}
3291
3292
3293void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003294 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003295 ASSERT(ToRegister(instr->result()).is(r0));
3296
Steve Block44f0eee2011-05-26 01:26:41 +01003297 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003298 __ mov(r0, Operand(instr->arity()));
3299 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
3300}
3301
3302
3303void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3304 CallRuntime(instr->function(), instr->arity(), instr);
3305}
3306
3307
3308void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003309 Register object = ToRegister(instr->object());
3310 Register value = ToRegister(instr->value());
3311 Register scratch = scratch0();
3312 int offset = instr->offset();
3313
3314 ASSERT(!object.is(value));
3315
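  // If the store transitions the object to a new map, install the new map
  // first.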
3316 if (!instr->transition().is_null()) {
3317 __ mov(scratch, Operand(instr->transition()));
3318 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
3319 }
3320
3321 // Do the store.
3322 if (instr->is_in_object()) {
3323 __ str(value, FieldMemOperand(object, offset));
3324 if (instr->needs_write_barrier()) {
3325 // Update the write barrier for the object for in-object properties.
3326 __ RecordWrite(object, Operand(offset), value, scratch);
3327 }
3328 } else {
3329 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
3330 __ str(value, FieldMemOperand(scratch, offset));
3331 if (instr->needs_write_barrier()) {
3332 // Update the write barrier for the properties array.
3333 // object is used as a scratch register.
3334 __ RecordWrite(scratch, Operand(offset), value, object);
3335 }
3336 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003337}
3338
3339
3340void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3341 ASSERT(ToRegister(instr->object()).is(r1));
3342 ASSERT(ToRegister(instr->value()).is(r0));
3343
3344 // Name is always in r2.
3345 __ mov(r2, Operand(instr->name()));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003346 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003347 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3348 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003349 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3350}
3351
3352
3353void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
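  // The unsigned comparison also catches negative indices: they wrap to large
  // unsigned values and fail the hs check.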
Ben Murdoch086aeea2011-05-13 15:57:08 +01003354 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
Steve Block9fac8402011-05-12 15:51:54 +01003355 DeoptimizeIf(hs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003356}
3357
3358
3359void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003360 Register value = ToRegister(instr->value());
3361 Register elements = ToRegister(instr->object());
3362 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3363 Register scratch = scratch0();
3364
3365 // Do the store.
3366 if (instr->key()->IsConstantOperand()) {
3367 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3368 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3369 int offset =
3370 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
3371 __ str(value, FieldMemOperand(elements, offset));
3372 } else {
3373 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
3374 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
3375 }
3376
3377 if (instr->hydrogen()->NeedsWriteBarrier()) {
3378 // Compute address of modified element and store it into key register.
3379 __ add(key, scratch, Operand(FixedArray::kHeaderSize));
3380 __ RecordWrite(elements, key, value);
3381 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003382}
3383
3384
Steve Block44f0eee2011-05-26 01:26:41 +01003385void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3386 LStoreKeyedSpecializedArrayElement* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003387
3388 Register external_pointer = ToRegister(instr->external_pointer());
Ben Murdoch257744e2011-11-30 15:57:28 +00003389 Register key = no_reg;
Ben Murdoch8b112d22011-06-08 16:22:53 +01003390 ExternalArrayType array_type = instr->array_type();
Ben Murdoch257744e2011-11-30 15:57:28 +00003391 bool key_is_constant = instr->key()->IsConstantOperand();
3392 int constant_key = 0;
3393 if (key_is_constant) {
3394 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
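    // The constant is scaled by up to 1 << 3 below, so it must fit in the
    // low 28 bits.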
3395 if (constant_key & 0xF0000000) {
3396 Abort("array index constant value too big.");
3397 }
3398 } else {
3399 key = ToRegister(instr->key());
3400 }
3401 int shift_size = ExternalArrayTypeToShiftSize(array_type);
3402
3403 if (array_type == kExternalFloatArray || array_type == kExternalDoubleArray) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01003404 CpuFeatures::Scope scope(VFP3);
3405 DwVfpRegister value(ToDoubleRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003406 Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
3407 : Operand(key, LSL, shift_size));
3408 __ add(scratch0(), external_pointer, operand);
3409 if (array_type == kExternalFloatArray) {
3410 __ vcvt_f32_f64(double_scratch0().low(), value);
3411 __ vstr(double_scratch0().low(), scratch0(), 0);
3412 } else { // i.e. array_type == kExternalDoubleArray
3413 __ vstr(value, scratch0(), 0);
3414 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003415 } else {
3416 Register value(ToRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003417 MemOperand mem_operand(key_is_constant
3418 ? MemOperand(external_pointer, constant_key * (1 << shift_size))
3419 : MemOperand(external_pointer, key, LSL, shift_size));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003420 switch (array_type) {
3421 case kExternalPixelArray:
Ben Murdoch8b112d22011-06-08 16:22:53 +01003422 case kExternalByteArray:
3423 case kExternalUnsignedByteArray:
Ben Murdoch257744e2011-11-30 15:57:28 +00003424 __ strb(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003425 break;
3426 case kExternalShortArray:
3427 case kExternalUnsignedShortArray:
Ben Murdoch257744e2011-11-30 15:57:28 +00003428 __ strh(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003429 break;
3430 case kExternalIntArray:
3431 case kExternalUnsignedIntArray:
Ben Murdoch257744e2011-11-30 15:57:28 +00003432 __ str(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003433 break;
3434 case kExternalFloatArray:
Ben Murdoch257744e2011-11-30 15:57:28 +00003435 case kExternalDoubleArray:
Ben Murdoch8b112d22011-06-08 16:22:53 +01003436 UNREACHABLE();
3437 break;
3438 }
3439 }
Steve Block44f0eee2011-05-26 01:26:41 +01003440}
3441
3442
Ben Murdochb0fe1622011-05-05 13:52:32 +01003443void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3444 ASSERT(ToRegister(instr->object()).is(r2));
3445 ASSERT(ToRegister(instr->key()).is(r1));
3446 ASSERT(ToRegister(instr->value()).is(r0));
3447
Ben Murdoch8b112d22011-06-08 16:22:53 +01003448 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003449 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3450 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003451 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3452}
3453
3454
Ben Murdoch257744e2011-11-30 15:57:28 +00003455void LCodeGen::DoStringAdd(LStringAdd* instr) {
3456 __ push(ToRegister(instr->left()));
3457 __ push(ToRegister(instr->right()));
3458 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3459 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3460}
3461
3462
Steve Block1e0659c2011-05-24 12:43:12 +01003463void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3464 class DeferredStringCharCodeAt: public LDeferredCode {
3465 public:
3466 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3467 : LDeferredCode(codegen), instr_(instr) { }
3468 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3469 private:
3470 LStringCharCodeAt* instr_;
3471 };
3472
3473 Register scratch = scratch0();
3474 Register string = ToRegister(instr->string());
3475 Register index = no_reg;
3476 int const_index = -1;
3477 if (instr->index()->IsConstantOperand()) {
3478 const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3479 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3480 if (!Smi::IsValid(const_index)) {
3481 // Guaranteed to be out of bounds because of the assert above.
3482 // So the bounds check that must dominate this instruction must
3483 // have deoptimized already.
3484 if (FLAG_debug_code) {
3485 __ Abort("StringCharCodeAt: out of bounds index.");
3486 }
3487 // No code needs to be generated.
3488 return;
3489 }
3490 } else {
3491 index = ToRegister(instr->index());
3492 }
3493 Register result = ToRegister(instr->result());
3494
3495 DeferredStringCharCodeAt* deferred =
3496 new DeferredStringCharCodeAt(this, instr);
3497
3498 Label flat_string, ascii_string, done;
3499
3500 // Fetch the instance type of the receiver into result register.
3501 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3502 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3503
3504 // We need special handling for non-flat strings.
3505 STATIC_ASSERT(kSeqStringTag == 0);
3506 __ tst(result, Operand(kStringRepresentationMask));
3507 __ b(eq, &flat_string);
3508
3509 // Handle non-flat strings.
3510 __ tst(result, Operand(kIsConsStringMask));
3511 __ b(eq, deferred->entry());
3512
3513 // ConsString.
3514 // Check whether the right hand side is the empty string (i.e. if
3515 // this is really a flat string in a cons string). If that is not
3516 // the case we would rather go to the runtime system now to flatten
3517 // the string.
3518 __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
3519 __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
3520 __ cmp(scratch, ip);
3521 __ b(ne, deferred->entry());
3522 // Get the first of the two strings and load its instance type.
3523 __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
3524 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3525 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3526 // If the first cons component is also non-flat, then go to runtime.
3527 STATIC_ASSERT(kSeqStringTag == 0);
3528 __ tst(result, Operand(kStringRepresentationMask));
3529 __ b(ne, deferred->entry());
3530
3531 // Check for 1-byte or 2-byte string.
3532 __ bind(&flat_string);
3533 STATIC_ASSERT(kAsciiStringTag != 0);
3534 __ tst(result, Operand(kStringEncodingMask));
3535 __ b(ne, &ascii_string);
3536
3537 // 2-byte string.
3538 // Load the 2-byte character code into the result register.
3539 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3540 if (instr->index()->IsConstantOperand()) {
3541 __ ldrh(result,
3542 FieldMemOperand(string,
3543 SeqTwoByteString::kHeaderSize + 2 * const_index));
3544 } else {
3545 __ add(scratch,
3546 string,
3547 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3548 __ ldrh(result, MemOperand(scratch, index, LSL, 1));
3549 }
3550 __ jmp(&done);
3551
3552 // ASCII string.
3553 // Load the byte into the result register.
3554 __ bind(&ascii_string);
3555 if (instr->index()->IsConstantOperand()) {
3556 __ ldrb(result, FieldMemOperand(string,
3557 SeqAsciiString::kHeaderSize + const_index));
3558 } else {
3559 __ add(scratch,
3560 string,
3561 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3562 __ ldrb(result, MemOperand(scratch, index));
3563 }
3564 __ bind(&done);
3565 __ bind(deferred->exit());
3566}
3567
3568
3569void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3570 Register string = ToRegister(instr->string());
3571 Register result = ToRegister(instr->result());
3572 Register scratch = scratch0();
3573
3574 // TODO(3095996): Get rid of this. For now, we need to make the
3575 // result register contain a valid pointer because it is already
3576 // contained in the register pointer map.
3577 __ mov(result, Operand(0));
3578
Ben Murdoch8b112d22011-06-08 16:22:53 +01003579 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01003580 __ push(string);
3581 // Push the index as a smi. This is safe because of the checks in
3582 // DoStringCharCodeAt above.
3583 if (instr->index()->IsConstantOperand()) {
3584 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3585 __ mov(scratch, Operand(Smi::FromInt(const_index)));
3586 __ push(scratch);
3587 } else {
3588 Register index = ToRegister(instr->index());
3589 __ SmiTag(index);
3590 __ push(index);
3591 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003592 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
Steve Block1e0659c2011-05-24 12:43:12 +01003593 if (FLAG_debug_code) {
3594 __ AbortIfNotSmi(r0);
3595 }
3596 __ SmiUntag(r0);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003597 __ StoreToSafepointRegisterSlot(r0, result);
Steve Block1e0659c2011-05-24 12:43:12 +01003598}
3599
3600
Steve Block44f0eee2011-05-26 01:26:41 +01003601void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3602 class DeferredStringCharFromCode: public LDeferredCode {
3603 public:
3604 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3605 : LDeferredCode(codegen), instr_(instr) { }
3606 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3607 private:
3608 LStringCharFromCode* instr_;
3609 };
3610
3611 DeferredStringCharFromCode* deferred =
3612 new DeferredStringCharFromCode(this, instr);
3613
3614 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3615 Register char_code = ToRegister(instr->char_code());
3616 Register result = ToRegister(instr->result());
3617 ASSERT(!char_code.is(result));
3618
3619 __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
3620 __ b(hi, deferred->entry());
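  // Fast case: index the single character string cache; undefined marks a
  // hole, which the deferred code handles by calling the runtime.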
3621 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3622 __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
3623 __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
3624 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3625 __ cmp(result, ip);
3626 __ b(eq, deferred->entry());
3627 __ bind(deferred->exit());
3628}
3629
3630
3631void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3632 Register char_code = ToRegister(instr->char_code());
3633 Register result = ToRegister(instr->result());
3634
3635 // TODO(3095996): Get rid of this. For now, we need to make the
3636 // result register contain a valid pointer because it is already
3637 // contained in the register pointer map.
3638 __ mov(result, Operand(0));
3639
Ben Murdoch8b112d22011-06-08 16:22:53 +01003640 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block44f0eee2011-05-26 01:26:41 +01003641 __ SmiTag(char_code);
3642 __ push(char_code);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003643 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
Steve Block44f0eee2011-05-26 01:26:41 +01003644 __ StoreToSafepointRegisterSlot(r0, result);
Steve Block44f0eee2011-05-26 01:26:41 +01003645}
3646
3647
Steve Block1e0659c2011-05-24 12:43:12 +01003648void LCodeGen::DoStringLength(LStringLength* instr) {
3649 Register string = ToRegister(instr->InputAt(0));
3650 Register result = ToRegister(instr->result());
3651 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3652}
3653
3654
Ben Murdochb0fe1622011-05-05 13:52:32 +01003655void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003656 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003657 ASSERT(input->IsRegister() || input->IsStackSlot());
3658 LOperand* output = instr->result();
3659 ASSERT(output->IsDoubleRegister());
3660 SwVfpRegister single_scratch = double_scratch0().low();
3661 if (input->IsStackSlot()) {
3662 Register scratch = scratch0();
3663 __ ldr(scratch, ToMemOperand(input));
3664 __ vmov(single_scratch, scratch);
3665 } else {
3666 __ vmov(single_scratch, ToRegister(input));
3667 }
3668 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003669}
3670
3671
3672void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3673 class DeferredNumberTagI: public LDeferredCode {
3674 public:
3675 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3676 : LDeferredCode(codegen), instr_(instr) { }
3677 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3678 private:
3679 LNumberTagI* instr_;
3680 };
3681
Steve Block1e0659c2011-05-24 12:43:12 +01003682 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003683 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3684 Register reg = ToRegister(input);
3685
3686 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
3687 __ SmiTag(reg, SetCC);
3688 __ b(vs, deferred->entry());
3689 __ bind(deferred->exit());
3690}
3691
3692
3693void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3694 Label slow;
Steve Block1e0659c2011-05-24 12:43:12 +01003695 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003696 DoubleRegister dbl_scratch = d0;
3697 SwVfpRegister flt_scratch = s0;
3698
3699 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01003700 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003701
3702 // There was overflow, so bits 30 and 31 of the original integer
3703 // disagree. Try to allocate a heap number in new space and store
3704 // the value in there. If that fails, call the runtime system.
3705 Label done;
3706 __ SmiUntag(reg);
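  // SmiUntag shifts right arithmetically, so bit 31 now holds the original
  // bit 30. The two bits disagreed, so flipping bit 31 below recovers the
  // original value, e.g. 0x40000000 tags to 0x80000000, untags to 0xC0000000,
  // and the eor restores 0x40000000.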
3707 __ eor(reg, reg, Operand(0x80000000));
3708 __ vmov(flt_scratch, reg);
3709 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
3710 if (FLAG_inline_new) {
3711 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3712 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3713 if (!reg.is(r5)) __ mov(reg, r5);
3714 __ b(&done);
3715 }
3716
3717 // Slow case: Call the runtime system to do the number allocation.
3718 __ bind(&slow);
3719
3720 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3721 // register is stored, as this register is in the pointer map, but contains an
3722 // integer value.
3723 __ mov(ip, Operand(0));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003724 __ StoreToSafepointRegisterSlot(ip, reg);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003725 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003726 if (!reg.is(r0)) __ mov(reg, r0);
3727
3728  // Done. Store the value in dbl_scratch into the allocated heap
3729  // number.
3730 __ bind(&done);
3731 __ sub(ip, reg, Operand(kHeapObjectTag));
3732 __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003733 __ StoreToSafepointRegisterSlot(reg, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003734}
3735
3736
3737void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3738 class DeferredNumberTagD: public LDeferredCode {
3739 public:
3740 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3741 : LDeferredCode(codegen), instr_(instr) { }
3742 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3743 private:
3744 LNumberTagD* instr_;
3745 };
3746
Steve Block1e0659c2011-05-24 12:43:12 +01003747 DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01003748 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003749 Register reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003750 Register temp1 = ToRegister(instr->TempAt(0));
3751 Register temp2 = ToRegister(instr->TempAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003752
3753 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3754 if (FLAG_inline_new) {
3755 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
3756 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
3757 } else {
3758 __ jmp(deferred->entry());
3759 }
3760 __ bind(deferred->exit());
3761 __ sub(ip, reg, Operand(kHeapObjectTag));
3762 __ vstr(input_reg, ip, HeapNumber::kValueOffset);
3763}
3764
3765
3766void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3767 // TODO(3095996): Get rid of this. For now, we need to make the
3768 // result register contain a valid pointer because it is already
3769 // contained in the register pointer map.
3770 Register reg = ToRegister(instr->result());
3771 __ mov(reg, Operand(0));
3772
Ben Murdoch8b112d22011-06-08 16:22:53 +01003773 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
3774 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003775 __ StoreToSafepointRegisterSlot(r0, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003776}
3777
3778
3779void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003780 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003781 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3782 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3783 __ SmiTag(ToRegister(input));
3784}
3785
3786
3787void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003788 LOperand* input = instr->InputAt(0);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003789 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3790 if (instr->needs_check()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003791 ASSERT(kHeapObjectTag == 1);
3792 // If the input is a HeapObject, SmiUntag will set the carry flag.
3793 __ SmiUntag(ToRegister(input), SetCC);
3794 DeoptimizeIf(cs, instr->environment());
3795 } else {
3796 __ SmiUntag(ToRegister(input));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003797 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003798}
3799
3800
3801void LCodeGen::EmitNumberUntagD(Register input_reg,
3802 DoubleRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003803 bool deoptimize_on_undefined,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003804 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01003805 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003806 SwVfpRegister flt_scratch = s0;
3807 ASSERT(!result_reg.is(d0));
3808
3809 Label load_smi, heap_number, done;
3810
3811 // Smi check.
3812 __ tst(input_reg, Operand(kSmiTagMask));
3813 __ b(eq, &load_smi);
3814
3815 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01003816 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003817 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01003818 __ cmp(scratch, Operand(ip));
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003819 if (deoptimize_on_undefined) {
3820 DeoptimizeIf(ne, env);
3821 } else {
3822 Label heap_number;
3823 __ b(eq, &heap_number);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003824
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003825 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3826 __ cmp(input_reg, Operand(ip));
3827 DeoptimizeIf(ne, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003828
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003829 // Convert undefined to NaN.
3830 __ LoadRoot(ip, Heap::kNanValueRootIndex);
3831 __ sub(ip, ip, Operand(kHeapObjectTag));
3832 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3833 __ jmp(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003834
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003835 __ bind(&heap_number);
3836 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003837 // Heap number to double register conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003838 __ sub(ip, input_reg, Operand(kHeapObjectTag));
3839 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3840 __ jmp(&done);
3841
3842  // Smi to double register conversion.
3843 __ bind(&load_smi);
3844 __ SmiUntag(input_reg); // Untag smi before converting to float.
3845 __ vmov(flt_scratch, input_reg);
3846 __ vcvt_f64_s32(result_reg, flt_scratch);
3847 __ SmiTag(input_reg); // Retag smi.
3848 __ bind(&done);
3849}
3850
3851
3852class DeferredTaggedToI: public LDeferredCode {
3853 public:
3854 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3855 : LDeferredCode(codegen), instr_(instr) { }
3856 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3857 private:
3858 LTaggedToI* instr_;
3859};
3860
3861
3862void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003863 Register input_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003864 Register scratch1 = scratch0();
3865 Register scratch2 = ToRegister(instr->TempAt(0));
3866 DwVfpRegister double_scratch = double_scratch0();
3867 SwVfpRegister single_scratch = double_scratch.low();
3868
3869 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
3870 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
3871
3872 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003873
Ben Murdoch257744e2011-11-30 15:57:28 +00003874 // The input was optimistically untagged; revert it.
3875 // The carry flag is set when we reach this deferred code as we just executed
3876 // SmiUntag(heap_object, SetCC)
3877 ASSERT(kHeapObjectTag == 1);
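  // adc computes input_reg * 2 + carry: it shifts the value back left and,
  // with the carry set, restores the heap object tag bit in one instruction.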
3878 __ adc(input_reg, input_reg, Operand(input_reg));
3879
Ben Murdochb0fe1622011-05-05 13:52:32 +01003880 // Heap number map check.
Steve Block44f0eee2011-05-26 01:26:41 +01003881 __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003882 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block44f0eee2011-05-26 01:26:41 +01003883 __ cmp(scratch1, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003884
3885 if (instr->truncating()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003886 Register scratch3 = ToRegister(instr->TempAt(1));
3887 DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
3888 ASSERT(!scratch3.is(input_reg) &&
3889 !scratch3.is(scratch1) &&
3890 !scratch3.is(scratch2));
3891 // Performs a truncating conversion of a floating point number as used by
3892 // the JS bitwise operations.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003893 Label heap_number;
3894 __ b(eq, &heap_number);
3895 // Check for undefined. Undefined is converted to zero for truncating
3896 // conversions.
3897 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3898 __ cmp(input_reg, Operand(ip));
3899 DeoptimizeIf(ne, instr->environment());
3900 __ mov(input_reg, Operand(0));
3901 __ b(&done);
3902
3903 __ bind(&heap_number);
Steve Block44f0eee2011-05-26 01:26:41 +01003904 __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
3905 __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);
3906
3907 __ EmitECMATruncate(input_reg,
3908 double_scratch2,
3909 single_scratch,
3910 scratch1,
3911 scratch2,
3912 scratch3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003913
3914 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003915 CpuFeatures::Scope scope(VFP3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003916 // Deoptimize if we don't have a heap number.
3917 DeoptimizeIf(ne, instr->environment());
3918
3919 __ sub(ip, input_reg, Operand(kHeapObjectTag));
Steve Block44f0eee2011-05-26 01:26:41 +01003920 __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
3921 __ EmitVFPTruncate(kRoundToZero,
3922 single_scratch,
3923 double_scratch,
3924 scratch1,
3925 scratch2,
3926 kCheckForInexactConversion);
3927 DeoptimizeIf(ne, instr->environment());
3928 // Load the result.
3929 __ vmov(input_reg, single_scratch);
3930
Ben Murdochb0fe1622011-05-05 13:52:32 +01003931 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +01003932 __ cmp(input_reg, Operand(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003933 __ b(ne, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01003934 __ vmov(scratch1, double_scratch.high());
3935 __ tst(scratch1, Operand(HeapNumber::kSignMask));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003936 DeoptimizeIf(ne, instr->environment());
3937 }
3938 }
3939 __ bind(&done);
3940}
3941
3942
3943void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003944 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003945 ASSERT(input->IsRegister());
3946 ASSERT(input->Equals(instr->result()));
3947
3948 Register input_reg = ToRegister(input);
3949
3950 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3951
Ben Murdoch257744e2011-11-30 15:57:28 +00003952 // Optimistically untag the input.
3953 // If the input is a HeapObject, SmiUntag will set the carry flag.
3954 __ SmiUntag(input_reg, SetCC);
3955 // Branch to deferred code if the input was tagged.
3956 // The deferred code will take care of restoring the tag.
3957 __ b(cs, deferred->entry());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003958 __ bind(deferred->exit());
3959}
3960
3961
3962void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003963 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003964 ASSERT(input->IsRegister());
3965 LOperand* result = instr->result();
3966 ASSERT(result->IsDoubleRegister());
3967
3968 Register input_reg = ToRegister(input);
3969 DoubleRegister result_reg = ToDoubleRegister(result);
3970
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003971 EmitNumberUntagD(input_reg, result_reg,
3972 instr->hydrogen()->deoptimize_on_undefined(),
3973 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003974}
3975
3976
3977void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003978 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003979 Register scratch1 = scratch0();
3980 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003981 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
3982 DwVfpRegister double_scratch = double_scratch0();
3983 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01003984
Steve Block44f0eee2011-05-26 01:26:41 +01003985 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01003986
Steve Block44f0eee2011-05-26 01:26:41 +01003987 if (instr->truncating()) {
3988 Register scratch3 = ToRegister(instr->TempAt(1));
3989 __ EmitECMATruncate(result_reg,
3990 double_input,
3991 single_scratch,
3992 scratch1,
3993 scratch2,
3994 scratch3);
3995 } else {
3996 VFPRoundingMode rounding_mode = kRoundToMinusInf;
3997 __ EmitVFPTruncate(rounding_mode,
3998 single_scratch,
3999 double_input,
4000 scratch1,
4001 scratch2,
4002 kCheckForInexactConversion);
4003    // Deoptimize if we had a VFP invalid exception or an
4004    // inexact conversion.
    DeoptimizeIf(ne, instr->environment());
    // Retrieve the result.
    __ vmov(result_reg, single_scratch);
  }
  __ bind(&done);
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
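  // reg & kSmiTagMask isolates the low tag bit: eq means the bit is clear
  // (a smi), ne means a heap object, so ne deoptimizes here.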
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmp(scratch, Operand(first));

    // If there is only one type in the interval, check for equality.
    if (first == last) {
      DeoptimizeIf(ne, instr->environment());
    } else {
      DeoptimizeIf(lo, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmp(scratch, Operand(last));
        DeoptimizeIf(hi, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

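    // When the mask is a single bit, (type & mask) == tag needs only a tst:
    // a zero tag requires the bit to be clear (deoptimize on ne), a nonzero
    // tag requires it to be set (deoptimize on eq). Otherwise mask and
    // compare explicitly.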
    if (IsPowerOf2(mask)) {
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ tst(scratch, Operand(mask));
      DeoptimizeIf(tag == 0 ? ne : eq, instr->environment());
    } else {
      __ and_(scratch, scratch, Operand(mask));
      __ cmp(scratch, Operand(tag));
      DeoptimizeIf(ne, instr->environment());
    }
  }
}


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->InputAt(0)->IsRegister());
  Register reg = ToRegister(instr->InputAt(0));
  __ cmp(reg, Operand(instr->hydrogen()->target()));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}


void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  Register unclamped_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampUint8(result_reg, unclamped_reg);
}

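// A tagged value reaching the clamp is a smi, a heap number, or undefined;
// anything else deoptimizes. Undefined clamps to zero (ToNumber(undefined)
// is NaN, and NaN clamps to zero), smis take the integer clamp, and heap
// numbers take the double clamp with round-to-nearest.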
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  Register scratch = scratch0();
  Register input_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  Label is_smi, done, heap_number;

  // Both smi and heap number cases are handled.
  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number.
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(factory()->heap_number_map()));
  __ b(eq, &heap_number);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, Operand(factory()->undefined_value()));
  DeoptimizeIf(ne, instr->environment());
  __ mov(result_reg, Operand(0));
  __ jmp(&done);

  // Heap number.
  __ bind(&heap_number);
  __ vldr(double_scratch0(), FieldMemOperand(input_reg,
                                             HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
  __ jmp(&done);

  // Smi.
  __ bind(&is_smi);
  __ SmiUntag(result_reg, input_reg);
  __ ClampUint8(result_reg, result_reg);

  __ bind(&done);
}

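// Objects in new space may be moved by the GC, so their addresses cannot be
// embedded in generated code. Such an object is kept in a
// JSGlobalPropertyCell, which lives in old space and whose value slot the
// GC keeps up to date, and the code loads through the cell. Old-space
// objects are safe to embed as immediates.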
void LCodeGen::LoadHeapObject(Register result,
                              Handle<HeapObject> object) {
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand(cell));
    __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(result, Operand(object));
  }
}


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

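  // A map check on every object from the receiver's prototype down to the
  // holder guarantees the chain still looks as it did at compile time: any
  // relevant change (new property, replaced prototype) gives the affected
  // object a new map, and the check then deoptimizes.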
  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
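  // Shallow literals (depth 1) within the stub's length limit are cloned by
  // FastCloneShallowArrayStub, and copy-on-write element stores make the
  // clone cheaper still; nested or oversized literals go to the runtime.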
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}


void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create the regexp literal using the runtime function.
  // The result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

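  // Once the boilerplate regexp exists, each evaluation of the literal gets
  // a shallow copy: allocate JSRegExp::kSize plus the in-object fields and
  // copy the boilerplate word by word.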
  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast-case closure allocation code, which allocates in new
  // space, for nested functions that don't need literal cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  Condition final_branch_condition = EmitTypeofIs(&true_label,
                                                  &false_label,
                                                  input,
                                                  instr->type_literal());
  __ b(final_branch_condition, &true_label);
  __ bind(&false_label);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}

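// Both typeof comparisons share EmitTypeofIs below: it emits the test and
// returns the condition under which the comparison holds, and the callers
// differ only in materializing a boolean versus branching directly.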
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}

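// Emits the typeof test: for each recognized type name it jumps to
// true_label or false_label for the easy cases and returns the condition
// the caller should branch on for the final comparison. An unknown type
// name jumps unconditionally to false_label, so the returned condition is
// then never taken.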
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ b(eq, true_label);
    __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE);
    __ b(hs, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}


void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  EmitIsConstructCall(result, scratch0());
  __ b(eq, &true_label);

  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp1, scratch0());
  EmitBranch(true_block, false_block, eq);
}

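// Whether the current function was invoked as a constructor is recorded in
// the caller's frame marker. The caller's frame is reached through the
// saved fp; an arguments adaptor frame (inserted when the argument count
// does not match the formal parameter count) is transparent here and is
// skipped before the marker is read.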
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for the lazy bailout instruction. It is used to capture the
  // environment after a call for populating the safepoint data with
  // deoptimization data.
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}


void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}


void LCodeGen::DoIn(LIn* instr) {
  Register obj = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(key, obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
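  // The limit is read from the roots array, so the runtime can force this
  // check to fail (and thereby regain control) by tightening the limit;
  // hs (unsigned >=) means there is headroom and the StackCheckStub call
  // into the runtime is skipped.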
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}


#undef __

} }  // namespace v8::internal