// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "arm/lithium-codegen-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

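// Emits safepoint information for a call site once the call has been
// generated: Generate() records the pointer map and deoptimization index
// captured when this post-call generator was created.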
class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_start(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}

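// Frame setup: saves the incoming r1 (closure), cp, fp and lr, points fp at
// the saved fp, reserves (and in debug builds zaps) the stack slots the code
// needs, and optionally emits the trace-enter runtime call.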
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}

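// Translates a stack-slot operand into an fp-relative MemOperand.
// Non-negative indices are spill slots below the fixed part of the frame
// (saved fp, function, context); negative indices are incoming parameters
// above the return address.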
MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  // TODO(regis): Revisit.
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}

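// Records where a single environment value lives (stack slot, register,
// pushed argument, or literal) in the deoptimization translation, using the
// tagged flavor of the entry when the value is a tagged pointer.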
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  if (instr != NULL) {
    LPointerMap* pointers = instr->pointer_map();
    RecordPosition(pointers->position());
    __ Call(code, mode);
    RegisterLazyDeoptimization(instr);
  } else {
    LPointerMap no_pointers(0);
    RecordPosition(no_pointers.position());
    __ Call(code, mode);
    RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  // Runtime calls to Throw are not supposed to ever return at the
  // call site, so don't register lazy deoptimization for these. We do
  // however have to record a safepoint since throwing exceptions can
  // cause garbage collections.
  if (!instr->IsThrow()) {
    RegisterLazyDeoptimization(instr);
  } else {
    RecordSafepoint(instr->pointer_map(), Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution could
  // continue from a previous bailout point and repeat the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4 0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------ translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    environment->WriteTranslation(this, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}

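// Registers the environment and emits a (possibly conditional) jump to the
// eager deoptimization entry for it, so the deoptimizer can rebuild the
// unoptimized frame when the condition holds.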
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}

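// Safepoints tell the GC which stack slots (and, in the WithRegisters
// variant, which registers) hold tagged pointers at a call site, keyed by
// the deoptimization index of that site.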
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
                                                    deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    }
  }
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint =
      safepoints_.DefineSafepointWithRegisters(
          masm(), arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister()) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  // Register cp always contains a pointer to the context.
  safepoint.DefinePointerRegister(cp);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}

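// Resolves one parallel move from a gap position. The gap resolver may route
// values through a temporary (the marker operand); d0 is reserved as the
// double scratch register and scratch0() as the core scratch register, so
// none of the moved operands may use them.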
void LCodeGen::DoParallelMove(LParallelMove* move) {
  // d0 must always be a scratch register.
  DoubleRegister dbl_scratch = d0;
  LUnallocated marker_operand(LUnallocated::NONE);

  Register core_scratch = scratch0();
  bool destroys_core_scratch = false;

  LGapResolver resolver(move->move_operands(), &marker_operand);
  const ZoneList<LMoveOperands>* moves = resolver.ResolveInReverseOrder();
  for (int i = moves->length() - 1; i >= 0; --i) {
    LMoveOperands move = moves->at(i);
    LOperand* from = move.from();
    LOperand* to = move.to();
    ASSERT(!from->IsDoubleRegister() ||
           !ToDoubleRegister(from).is(dbl_scratch));
    ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(dbl_scratch));
    ASSERT(!from->IsRegister() || !ToRegister(from).is(core_scratch));
    ASSERT(!to->IsRegister() || !ToRegister(to).is(core_scratch));
    if (from == &marker_operand) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), core_scratch);
        ASSERT(destroys_core_scratch);
      } else if (to->IsStackSlot()) {
        __ str(core_scratch, ToMemOperand(to));
        ASSERT(destroys_core_scratch);
      } else if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), dbl_scratch);
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
      }
    } else if (to == &marker_operand) {
      if (from->IsRegister() || from->IsConstantOperand()) {
        __ mov(core_scratch, ToOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsStackSlot()) {
        __ ldr(core_scratch, ToMemOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsDoubleRegister()) {
        __ vmov(dbl_scratch, ToDoubleRegister(from));
      } else {
        ASSERT(from->IsDoubleStackSlot());
        // TODO(regis): Why is vldr not taking a MemOperand?
        // __ vldr(dbl_scratch, ToMemOperand(from));
        MemOperand from_operand = ToMemOperand(from);
        __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      }
    } else if (from->IsConstantOperand()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ mov(ip, ToOperand(from));
        __ str(ip, ToMemOperand(to));
      }
    } else if (from->IsRegister()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ str(ToRegister(from), ToMemOperand(to));
      }
    } else if (to->IsRegister()) {
      ASSERT(from->IsStackSlot());
      __ ldr(ToRegister(to), ToMemOperand(from));
    } else if (from->IsStackSlot()) {
      ASSERT(to->IsStackSlot());
      __ ldr(ip, ToMemOperand(from));
      __ str(ip, ToMemOperand(to));
    } else if (from->IsDoubleRegister()) {
      if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), ToDoubleRegister(from));
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(ToDoubleRegister(from), to_operand.rn(), to_operand.offset());
      }
    } else if (to->IsDoubleRegister()) {
      ASSERT(from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(ToDoubleRegister(to), ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(ToDoubleRegister(to), from_operand.rn(), from_operand.offset());
    } else {
      ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(dbl_scratch, ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      // TODO(regis): Why is vstr not taking a MemOperand?
      // __ vstr(dbl_scratch, ToMemOperand(to));
      MemOperand to_operand = ToMemOperand(to);
      __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
    }
  }

  if (destroys_core_scratch) {
    __ ldr(core_scratch, MemOperand(fp, -kPointerSize));
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      Abort("StringCharAtStub unimplemented.");
      break;
    }
    case CodeStub::MathPow: {
      Abort("MathPowStub unimplemented.");
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      Abort("TranscendentalCache unimplemented.");
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}

void LCodeGen::DoModI(LModI* instr) {
  Abort("DoModI unimplemented.");
}


void LCodeGen::DoDivI(LDivI* instr) {
  Abort("DoDivI unimplemented.");
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->left());
  Register right = EmitLoadRegister(instr->right(), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->right()->IsConstantOperand()) {
    __ orr(ToRegister(instr->temp()), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(scratch, left, left, right);
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ tst(left, Operand(left));
    __ b(ne, &done);
    if (instr->right()->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->right())) < 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->temp()), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Register right_reg = EmitLoadRegister(right, ip);
  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), Operand(right_reg));
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(scratch, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          __ mov(result, Operand(result, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  Register left = ToRegister(instr->left());
  Register right = EmitLoadRegister(instr->right(), ip);
  ASSERT(instr->left()->Equals(instr->result()));
  __ sub(left, left, right, SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  Abort("DoConstantD unimplemented.");
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->input());
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->input());
  __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
  Abort("DoFixedArrayLength untested.");
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Abort("DoValueOf unimplemented.");
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->input();
  ASSERT(input->Equals(instr->result()));
  __ mvn(ToRegister(input), Operand(ToRegister(input)));
  Abort("DoBitNotI untested.");
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));

  Register right_reg = EmitLoadRegister(right, ip);
  __ add(ToRegister(left), ToRegister(left), Operand(right_reg), SetCC);

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->left());
  DoubleRegister right = ToDoubleRegister(instr->right());
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      Abort("DoArithmeticD unimplemented for MOD.");
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->left()).is(r1));
  ASSERT(ToRegister(instr->right()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // TODO(regis): Implement TypeRecordingBinaryOpStub and replace current
  // GenericBinaryOpStub:
  // TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, r1, r0);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}

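// Emits a branch to the given blocks, falling through when the next block to
// be emitted is one of the targets so that no redundant jump is generated.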
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->input());
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, nz);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->input());
    Register scratch = scratch0();

    // Test for the double value. Zero and NaN are false.
    // Clear the Invalid cumulative exception flags.
    __ ClearFPSCRBits(kVFPInvalidExceptionBit, scratch);
    __ vcmp(reg, 0.0);
    // Retrieve the exception and status flags and
    // check for zero or an invalid exception.
    __ vmrs(scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPInvalidExceptionBit));
    EmitBranch(true_block, false_block, ne);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->input());
    if (instr->hydrogen()->type().IsBoolean()) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test for double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      // Clear the Invalid cumulative exception flags.
      __ ClearFPSCRBits(kVFPInvalidExceptionBit, scratch);
      __ vcmp(dbl_scratch, 0.0);
      // Retrieve the exception and status flags and
      // check for zero or an invalid exception.
      __ vmrs(scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPInvalidExceptionBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, nz);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  // TODO(srdjan): Perform stack overflow check if this goto needs it
  // before jumping.
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    __ jmp(chunk_->GetAssemblyLabel(block));
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  UNIMPLEMENTED();
}


void LCodeGen::DoGoto(LGoto* instr) {
  // TODO(srdjan): Implement deferred stack check.
  EmitGoto(instr->block_id(), NULL);
}

Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToOperand(right));
  Abort("EmitCmpI untested.");
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  Abort("DoCmpID unimplemented.");
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  Abort("DoCmpIDAndBranch unimplemented.");
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->left());
  Register right = ToRegister(instr->right());
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  Abort("DoCmpJSObjectEq untested.");
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Abort("DoCmpJSObjectEqAndBranch unimplemented.");
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->input());
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->input());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}


Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  Abort("EmitIsObject unimplemented.");
  return ne;
}


void LCodeGen::DoIsObject(LIsObject* instr) {
  Abort("DoIsObject unimplemented.");
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Abort("DoIsObjectAndBranch unimplemented.");
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Register result = ToRegister(instr->result());
  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  Label done;
  __ b(eq, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}


InstanceType LHasInstanceType::TestType() {
  InstanceType from = hydrogen()->from();
  InstanceType to = hydrogen()->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


Condition LHasInstanceType::BranchCondition() {
  InstanceType from = hydrogen()->from();
  InstanceType to = hydrogen()->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Abort("DoHasInstanceType unimplemented.");
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->input());

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, false_label);

  __ CompareObjectType(input, scratch, scratch, instr->TestType());
  EmitBranch(true_block, false_block, instr->BranchCondition());
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Abort("DoHasCachedArrayIndex unimplemented.");
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Abort("DoHasCachedArrayIndexAndBranch unimplemented.");
}


// Branches to a label or falls through with the answer in the z flag. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  Abort("EmitClassOfTest unimplemented.");
}


void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Abort("DoClassOfTest unimplemented.");
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Abort("DoClassOfTestAndBranch unimplemented.");
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->input());
  Register temp = ToRegister(instr->temp());
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->left()).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->right()).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  Label true_value, done;
  __ tst(r0, r0);
  __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
  __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
}


void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  Abort("DoInstanceOfAndBranch unimplemented.");
}


void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  Abort("DoInstanceOfKnownGlobal unimplemented.");
}

static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return no_condition;
  }
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ cmp(r0, Operand(0));
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}


void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Abort("DoCmpTAndBranch unimplemented.");
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}


void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
  Register value = ToRegister(instr->input());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ str(value, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->input());
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}

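// Loads the elements backing store of a JSObject and, in debug builds,
// verifies that it is a fast (FixedArray or copy-on-write) elements array.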
Ben Murdochb0fe1622011-05-05 13:52:32 +01001598void LCodeGen::DoLoadElements(LLoadElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001599 ASSERT(instr->result()->Equals(instr->input()));
1600 Register reg = ToRegister(instr->input());
1601 Register scratch = scratch0();
1602
1603 __ ldr(reg, FieldMemOperand(reg, JSObject::kElementsOffset));
1604 if (FLAG_debug_code) {
1605 Label done;
1606 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1607 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1608 __ cmp(scratch, ip);
1609 __ b(eq, &done);
1610 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
1611 __ cmp(scratch, ip);
1612 __ Check(eq, "Check for fast elements failed.");
1613 __ bind(&done);
1614 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001615}
1616
1617
1618void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001619 Register arguments = ToRegister(instr->arguments());
1620 Register length = ToRegister(instr->length());
1621 Register index = ToRegister(instr->index());
1622 Register result = ToRegister(instr->result());
1623
1624 // Bailout index is not a valid argument index. Use unsigned check to get
1625 // negative check for free.
1626 __ sub(length, length, index, SetCC);
1627 DeoptimizeIf(ls, instr->environment());
1628
1629 // There are two words between the frame pointer and the last argument.
1630 // Subtracting from length accounts for one of them add one more.
1631 __ add(length, length, Operand(1));
1632 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001633}
1634
1635
1636void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001637 Register elements = ToRegister(instr->elements());
1638 Register key = EmitLoadRegister(instr->key(), scratch0());
1639 Register result;
1640 Register scratch = scratch0();
1641
1642 if (instr->load_result() != NULL) {
1643 result = ToRegister(instr->load_result());
1644 } else {
1645 result = ToRegister(instr->result());
1646 ASSERT(result.is(elements));
1647 }
1648
1649 // Load the result.
1650 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
1651 __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1652
1653 Representation r = instr->hydrogen()->representation();
1654 if (r.IsInteger32()) {
1655 // Untag and check for smi.
1656 __ SmiUntag(result);
1657 DeoptimizeIf(cs, instr->environment());
1658 } else if (r.IsDouble()) {
1659 EmitNumberUntagD(result,
1660 ToDoubleRegister(instr->result()),
1661 instr->environment());
1662 } else {
1663 // Check for the hole value.
1664 ASSERT(r.IsTagged());
1665 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
1666 __ cmp(result, scratch);
1667 DeoptimizeIf(eq, instr->environment());
1668 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001669}
1670
1671
1672void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
1673 ASSERT(ToRegister(instr->object()).is(r1));
1674 ASSERT(ToRegister(instr->key()).is(r0));
1675
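  // The generic case goes through the keyed load IC; the receiver is expected
  // in r1 and the key in r0, as asserted above.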
1676 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
1677 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1678}
1679
1680
1681void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001682 Register scratch = scratch0();
1683 Register result = ToRegister(instr->result());
1684
1685 // Check if the calling frame is an arguments adaptor frame.
1686 Label done, adapted;
1687 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1688 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
1689 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1690
1691 // Result is the frame pointer for the frame if not adapted and for the real
1692 // frame below the adaptor frame if adapted.
1693 __ mov(result, fp, LeaveCC, ne);
1694 __ mov(result, scratch, LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001695}
1696
1697
1698void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001699 Register elem = ToRegister(instr->input());
1700 Register result = ToRegister(instr->result());
1701
1702 Label done;
1703
1704 // If there is no arguments adaptor frame, the number of arguments is fixed.
1705 __ cmp(fp, elem);
1706 __ mov(result, Operand(scope()->num_parameters()));
1707 __ b(eq, &done);
1708
1709 // Arguments adaptor frame present. Get argument length from there.
1710 __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1711 __ ldr(result,
1712 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
1713 __ SmiUntag(result);
1714
1715 // Argument length is in result register.
1716 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001717}
1718
1719
1720void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
1721 Abort("DoApplyArguments unimplemented.");
1722}
1723
1724
1725void LCodeGen::DoPushArgument(LPushArgument* instr) {
1726 LOperand* argument = instr->input();
1727 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
1728 Abort("DoPushArgument not implemented for double type.");
1729 } else {
1730 Register argument_reg = EmitLoadRegister(argument, ip);
1731 __ push(argument_reg);
1732 }
1733}
1734
1735
1736void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
1737 Register result = ToRegister(instr->result());
1738 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
1739}
1740
1741
1742void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
1743 Register result = ToRegister(instr->result());
1744 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
1745 __ ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset));
1746}
1747
1748
1749void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
1750 int arity,
1751 LInstruction* instr) {
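  // Expects the JSFunction to invoke in r1 and returns the call result in r0.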
1752 // Change context if needed.
1753 bool change_context =
1754 (graph()->info()->closure()->context() != function->context()) ||
1755 scope()->contains_with() ||
1756 (scope()->num_heap_slots() > 0);
1757 if (change_context) {
1758 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1759 }
1760
1761 // Set r0 to arguments count if adaption is not needed. Assumes that r0
1762 // is available to write to at this point.
1763 if (!function->NeedsArgumentsAdaption()) {
1764 __ mov(r0, Operand(arity));
1765 }
1766
1767 LPointerMap* pointers = instr->pointer_map();
1768 RecordPosition(pointers->position());
1769
1770 // Invoke function.
1771 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1772 __ Call(ip);
1773
1774 // Set up deoptimization.
1775 RegisterLazyDeoptimization(instr);
1776
1777 // Restore context.
1778 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1779}
1780
1781
1782void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001783 ASSERT(ToRegister(instr->result()).is(r0));
1784 __ mov(r1, Operand(instr->function()));
1785 CallKnownFunction(instr->function(), instr->arity(), instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001786}
1787
1788
1789void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
1790 Abort("DoDeferredMathAbsTaggedHeapNumber unimplemented.");
1791}
1792
1793
1794void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
1795 Abort("DoMathAbs unimplemented.");
1796}
1797
1798
1799void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
1800 Abort("DoMathFloor unimplemented.");
1801}
1802
1803
1804void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
1805 Abort("DoMathSqrt unimplemented.");
1806}
1807
1808
1809void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
1810 switch (instr->op()) {
1811 case kMathAbs:
1812 DoMathAbs(instr);
1813 break;
1814 case kMathFloor:
1815 DoMathFloor(instr);
1816 break;
1817 case kMathSqrt:
1818 DoMathSqrt(instr);
1819 break;
1820 default:
1821 Abort("Unimplemented type of LUnaryMathOperation.");
1822 UNREACHABLE();
1823 }
1824}
1825
1826
1827void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001828 ASSERT(ToRegister(instr->result()).is(r0));
1829
1830 int arity = instr->arity();
1831 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
1832 CallCode(ic, RelocInfo::CODE_TARGET, instr);
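  // Restore the context register after the call.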
1833 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001834}
1835
1836
1837void LCodeGen::DoCallNamed(LCallNamed* instr) {
1838 ASSERT(ToRegister(instr->result()).is(r0));
1839
1840 int arity = instr->arity();
1841 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
1842 __ mov(r2, Operand(instr->name()));
1843 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1844 // Restore context register.
1845 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1846}
1847
1848
1849void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001850 ASSERT(ToRegister(instr->result()).is(r0));
1851
1852 int arity = instr->arity();
1853 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
1854 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1855 __ Drop(1);
1856 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001857}
1858
1859
1860void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001861 ASSERT(ToRegister(instr->result()).is(r0));
1862
1863 int arity = instr->arity();
1864 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
1865 __ mov(r2, Operand(instr->name()));
1866 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
1867 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001868}
1869
1870
1871void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
1872 ASSERT(ToRegister(instr->result()).is(r0));
1873 __ mov(r1, Operand(instr->target()));
1874 CallKnownFunction(instr->target(), instr->arity(), instr);
1875}
1876
1877
1878void LCodeGen::DoCallNew(LCallNew* instr) {
1879 ASSERT(ToRegister(instr->input()).is(r1));
1880 ASSERT(ToRegister(instr->result()).is(r0));
1881
1882 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
1883 __ mov(r0, Operand(instr->arity()));
1884 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
1885}
1886
1887
1888void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
1889 CallRuntime(instr->function(), instr->arity(), instr);
1890}
1891
1892
1893void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001894 Register object = ToRegister(instr->object());
1895 Register value = ToRegister(instr->value());
1896 Register scratch = scratch0();
1897 int offset = instr->offset();
1898
1899 ASSERT(!object.is(value));
1900
1901 if (!instr->transition().is_null()) {
1902 __ mov(scratch, Operand(instr->transition()));
1903 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
1904 }
1905
1906 // Do the store.
1907 if (instr->is_in_object()) {
1908 __ str(value, FieldMemOperand(object, offset));
1909 if (instr->needs_write_barrier()) {
1910 // Update the write barrier for the object for in-object properties.
1911 __ RecordWrite(object, Operand(offset), value, scratch);
1912 }
1913 } else {
1914 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
1915 __ str(value, FieldMemOperand(scratch, offset));
1916 if (instr->needs_write_barrier()) {
1917 // Update the write barrier for the properties array.
1918 // object is used as a scratch register.
1919 __ RecordWrite(scratch, Operand(offset), value, object);
1920 }
1921 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001922}
1923
1924
1925void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
1926 ASSERT(ToRegister(instr->object()).is(r1));
1927 ASSERT(ToRegister(instr->value()).is(r0));
1928
1929 // Name is always in r2.
1930 __ mov(r2, Operand(instr->name()));
1931 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1932 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1933}
1934
1935
1936void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001937 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
Steve Block9fac8402011-05-12 15:51:54 +01001938 DeoptimizeIf(hs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001939}
1940
1941
1942void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001943 Register value = ToRegister(instr->value());
1944 Register elements = ToRegister(instr->object());
1945 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
1946 Register scratch = scratch0();
1947
1948 // Do the store.
1949 if (instr->key()->IsConstantOperand()) {
1950 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
1951 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
1952 int offset =
1953 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
1954 __ str(value, FieldMemOperand(elements, offset));
1955 } else {
1956 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
1957 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1958 }
1959
1960 if (instr->hydrogen()->NeedsWriteBarrier()) {
1961 // Compute address of modified element and store it into key register.
1962 __ add(key, scratch, Operand(FixedArray::kHeaderSize));
1963 __ RecordWrite(elements, key, value);
1964 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001965}
1966
1967
1968void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
1969 ASSERT(ToRegister(instr->object()).is(r2));
1970 ASSERT(ToRegister(instr->key()).is(r1));
1971 ASSERT(ToRegister(instr->value()).is(r0));
1972
1973 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1974 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1975}
1976
1977
1978void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
1979 Abort("DoInteger32ToDouble unimplemented.");
1980}
1981
1982
1983void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
1984 class DeferredNumberTagI: public LDeferredCode {
1985 public:
1986 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
1987 : LDeferredCode(codegen), instr_(instr) { }
1988 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
1989 private:
1990 LNumberTagI* instr_;
1991 };
1992
1993 LOperand* input = instr->input();
1994 ASSERT(input->IsRegister() && input->Equals(instr->result()));
1995 Register reg = ToRegister(input);
1996
1997 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
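  // Smi-tag the value; if the tag operation overflows, fall through to the
  // deferred code that allocates a heap number instead.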
1998 __ SmiTag(reg, SetCC);
1999 __ b(vs, deferred->entry());
2000 __ bind(deferred->exit());
2001}
2002
2003
2004void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
2005 Label slow;
2006 Register reg = ToRegister(instr->input());
2007 DoubleRegister dbl_scratch = d0;
2008 SwVfpRegister flt_scratch = s0;
2009
2010 // Preserve the value of all registers.
2011 __ PushSafepointRegisters();
2012
2013 // There was overflow, so bits 30 and 31 of the original integer
2014 // disagree. Try to allocate a heap number in new space and store
2015 // the value in there. If that fails, call the runtime system.
2016 Label done;
2017 __ SmiUntag(reg);
2018 __ eor(reg, reg, Operand(0x80000000));
2019 __ vmov(flt_scratch, reg);
2020 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
2021 if (FLAG_inline_new) {
2022 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2023 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
2024 if (!reg.is(r5)) __ mov(reg, r5);
2025 __ b(&done);
2026 }
2027
2028 // Slow case: Call the runtime system to do the number allocation.
2029 __ bind(&slow);
2030
2031 // TODO(3095996): Put a valid pointer value in the stack slot where the result
2032 // register is stored, as this register is in the pointer map, but contains an
2033 // integer value.
2034 __ mov(ip, Operand(0));
2035 int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
2036 __ str(ip, MemOperand(sp, reg_stack_index * kPointerSize));
2037
2038 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2039 RecordSafepointWithRegisters(
2040 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2041 if (!reg.is(r0)) __ mov(reg, r0);
2042
2043 // Done. Store the value in dbl_scratch into the value field of the
2044 // allocated heap number.
2045 __ bind(&done);
2046 __ sub(ip, reg, Operand(kHeapObjectTag));
2047 __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
2048 __ str(reg, MemOperand(sp, reg_stack_index * kPointerSize));
2049 __ PopSafepointRegisters();
2050}
2051
2052
2053void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
2054 class DeferredNumberTagD: public LDeferredCode {
2055 public:
2056 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
2057 : LDeferredCode(codegen), instr_(instr) { }
2058 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
2059 private:
2060 LNumberTagD* instr_;
2061 };
2062
2063 DoubleRegister input_reg = ToDoubleRegister(instr->input());
Steve Block9fac8402011-05-12 15:51:54 +01002064 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002065 Register reg = ToRegister(instr->result());
2066 Register temp1 = ToRegister(instr->temp1());
2067 Register temp2 = ToRegister(instr->temp2());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002068
2069 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
2070 if (FLAG_inline_new) {
2071 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
2072 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
2073 } else {
2074 __ jmp(deferred->entry());
2075 }
2076 __ bind(deferred->exit());
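  // Store the untagged double value into the newly allocated heap number.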
2077 __ sub(ip, reg, Operand(kHeapObjectTag));
2078 __ vstr(input_reg, ip, HeapNumber::kValueOffset);
2079}
2080
2081
2082void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2083 // TODO(3095996): Get rid of this. For now, we need to make the
2084 // result register contain a valid pointer because it is already
2085 // contained in the register pointer map.
2086 Register reg = ToRegister(instr->result());
2087 __ mov(reg, Operand(0));
2088
2089 __ PushSafepointRegisters();
2090 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2091 RecordSafepointWithRegisters(
2092 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2093 int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
2094 __ str(r0, MemOperand(sp, reg_stack_index * kPointerSize));
2095 __ PopSafepointRegisters();
2096}
2097
2098
2099void LCodeGen::DoSmiTag(LSmiTag* instr) {
2100 LOperand* input = instr->input();
2101 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2102 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
2103 __ SmiTag(ToRegister(input));
2104}
2105
2106
2107void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002108 LOperand* input = instr->input();
2109 ASSERT(input->IsRegister() && input->Equals(instr->result()));
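  // When a check is needed, deoptimize if the value is not a smi (tag bit set).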
2110 if (instr->needs_check()) {
2111 __ tst(ToRegister(input), Operand(kSmiTagMask));
2112 DeoptimizeIf(ne, instr->environment());
2113 }
2114 __ SmiUntag(ToRegister(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002115}
2116
2117
2118void LCodeGen::EmitNumberUntagD(Register input_reg,
2119 DoubleRegister result_reg,
2120 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01002121 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002122 SwVfpRegister flt_scratch = s0;
2123 ASSERT(!result_reg.is(d0));
2124
2125 Label load_smi, heap_number, done;
2126
2127 // Smi check.
2128 __ tst(input_reg, Operand(kSmiTagMask));
2129 __ b(eq, &load_smi);
2130
2131 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01002132 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002133 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01002134 __ cmp(scratch, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002135 __ b(eq, &heap_number);
2136
2137 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2138 __ cmp(input_reg, Operand(ip));
2139 DeoptimizeIf(ne, env);
2140
2141 // Convert undefined to NaN.
2142 __ LoadRoot(ip, Heap::kNanValueRootIndex);
2143 __ sub(ip, ip, Operand(kHeapObjectTag));
2144 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
2145 __ jmp(&done);
2146
2147 // Heap number to double register conversion.
2148 __ bind(&heap_number);
2149 __ sub(ip, input_reg, Operand(kHeapObjectTag));
2150 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
2151 __ jmp(&done);
2152
2153 // Smi to double register conversion
2154 __ bind(&load_smi);
2155 __ SmiUntag(input_reg); // Untag smi before converting to float.
2156 __ vmov(flt_scratch, input_reg);
2157 __ vcvt_f64_s32(result_reg, flt_scratch);
2158 __ SmiTag(input_reg); // Retag smi.
2159 __ bind(&done);
2160}
2161
2162
2163class DeferredTaggedToI: public LDeferredCode {
2164 public:
2165 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
2166 : LDeferredCode(codegen), instr_(instr) { }
2167 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
2168 private:
2169 LTaggedToI* instr_;
2170};
2171
2172
2173void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
2174 Label done;
2175 Register input_reg = ToRegister(instr->input());
Steve Block9fac8402011-05-12 15:51:54 +01002176 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002177 DoubleRegister dbl_scratch = d0;
2178 SwVfpRegister flt_scratch = s0;
2179 DoubleRegister dbl_tmp = ToDoubleRegister(instr->temp());
2180
2181 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01002182 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002183 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01002184 __ cmp(scratch, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002185
2186 if (instr->truncating()) {
2187 Label heap_number;
2188 __ b(eq, &heap_number);
2189 // Check for undefined. Undefined is converted to zero for truncating
2190 // conversions.
2191 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2192 __ cmp(input_reg, Operand(ip));
2193 DeoptimizeIf(ne, instr->environment());
2194 __ mov(input_reg, Operand(0));
2195 __ b(&done);
2196
2197 __ bind(&heap_number);
2198 __ sub(ip, input_reg, Operand(kHeapObjectTag));
2199 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
2200 __ vcmp(dbl_tmp, 0.0); // Sets overflow bit if NaN.
2201 __ vcvt_s32_f64(flt_scratch, dbl_tmp);
2202 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
2203 __ vmrs(pc); // Move vector status bits to normal status bits.
2204 // Overflow bit is set if dbl_tmp is NaN.
2205 __ cmn(input_reg, Operand(1), vc); // 0x7fffffff + 1 -> overflow.
2206 __ cmp(input_reg, Operand(1), vc); // 0x80000000 - 1 -> overflow.
2207 DeoptimizeIf(vs, instr->environment()); // Saturation may have occurred.
2208
2209 } else {
2210 // Deoptimize if we don't have a heap number.
2211 DeoptimizeIf(ne, instr->environment());
2212
2213 __ sub(ip, input_reg, Operand(kHeapObjectTag));
2214 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
2215 __ vcvt_s32_f64(flt_scratch, dbl_tmp);
2216 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
2217 // Non-truncating conversion means that we cannot lose bits, so we convert
2218 // back to check; note that using non-overlapping s and d regs would be
2219 // slightly faster.
2220 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
2221 __ vcmp(dbl_scratch, dbl_tmp);
2222 __ vmrs(pc); // Move vector status bits to normal status bits.
2223 DeoptimizeIf(ne, instr->environment()); // Not equal or unordered.
2224 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2225 __ tst(input_reg, Operand(input_reg));
2226 __ b(ne, &done);
2227 __ vmov(lr, ip, dbl_tmp);
2228 __ tst(ip, Operand(1 << 31)); // Test sign bit.
2229 DeoptimizeIf(ne, instr->environment());
2230 }
2231 }
2232 __ bind(&done);
2233}
2234
2235
2236void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
2237 LOperand* input = instr->input();
2238 ASSERT(input->IsRegister());
2239 ASSERT(input->Equals(instr->result()));
2240
2241 Register input_reg = ToRegister(input);
2242
2243 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
2244
2245 // Smi check.
2246 __ tst(input_reg, Operand(kSmiTagMask));
2247 __ b(ne, deferred->entry());
2248
2249 // Smi to int32 conversion
2250 __ SmiUntag(input_reg); // Untag smi.
2251
2252 __ bind(deferred->exit());
2253}
2254
2255
2256void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
2257 LOperand* input = instr->input();
2258 ASSERT(input->IsRegister());
2259 LOperand* result = instr->result();
2260 ASSERT(result->IsDoubleRegister());
2261
2262 Register input_reg = ToRegister(input);
2263 DoubleRegister result_reg = ToDoubleRegister(result);
2264
2265 EmitNumberUntagD(input_reg, result_reg, instr->environment());
2266}
2267
2268
2269void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
2270 Abort("DoDoubleToI unimplemented.");
2271}
2272
2273
2274void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
2275 LOperand* input = instr->input();
2276 ASSERT(input->IsRegister());
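  // Test the smi tag bit and deoptimize if the instruction's condition holds.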
2277 __ tst(ToRegister(input), Operand(kSmiTagMask));
2278 DeoptimizeIf(instr->condition(), instr->environment());
2279}
2280
2281
2282void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002283 Register input = ToRegister(instr->input());
2284 Register scratch = scratch0();
2285 InstanceType first = instr->hydrogen()->first();
2286 InstanceType last = instr->hydrogen()->last();
2287
2288 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2289 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2290 __ cmp(scratch, Operand(first));
2291
2292 // If there is only one type in the interval, check for equality.
2293 if (first == last) {
2294 DeoptimizeIf(ne, instr->environment());
2295 } else {
2296 DeoptimizeIf(lo, instr->environment());
2297 // Omit check for the last type.
2298 if (last != LAST_TYPE) {
2299 __ cmp(scratch, Operand(last));
2300 DeoptimizeIf(hi, instr->environment());
2301 }
2302 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002303}
2304
2305
2306void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
2307 ASSERT(instr->input()->IsRegister());
2308 Register reg = ToRegister(instr->input());
2309 __ cmp(reg, Operand(instr->hydrogen()->target()));
2310 DeoptimizeIf(ne, instr->environment());
2311}
2312
2313
2314void LCodeGen::DoCheckMap(LCheckMap* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002315 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002316 LOperand* input = instr->input();
2317 ASSERT(input->IsRegister());
2318 Register reg = ToRegister(input);
Steve Block9fac8402011-05-12 15:51:54 +01002319 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
2320 __ cmp(scratch, Operand(instr->hydrogen()->map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002321 DeoptimizeIf(ne, instr->environment());
2322}
2323
2324
2325void LCodeGen::LoadPrototype(Register result,
2326 Handle<JSObject> prototype) {
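  // Objects in new space may move, so they cannot be embedded directly in the
  // generated code; reference them through a global property cell instead.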
Steve Block9fac8402011-05-12 15:51:54 +01002327 if (Heap::InNewSpace(*prototype)) {
2328 Handle<JSGlobalPropertyCell> cell =
2329 Factory::NewJSGlobalPropertyCell(prototype);
2330 __ mov(result, Operand(cell));
2331 } else {
2332 __ mov(result, Operand(prototype));
2333 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002334}
2335
2336
2337void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002338 Register temp1 = ToRegister(instr->temp1());
2339 Register temp2 = ToRegister(instr->temp2());
2340
2341 Handle<JSObject> holder = instr->holder();
2342 Handle<Map> receiver_map = instr->receiver_map();
2343 Handle<JSObject> current_prototype(JSObject::cast(receiver_map->prototype()));
2344
2345 // Load prototype object.
2346 LoadPrototype(temp1, current_prototype);
2347
2348 // Check prototype maps up to the holder.
2349 while (!current_prototype.is_identical_to(holder)) {
2350 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
2351 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
2352 DeoptimizeIf(ne, instr->environment());
2353 current_prototype =
2354 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
2355 // Load next prototype object.
2356 LoadPrototype(temp1, current_prototype);
2357 }
2358
2359 // Check the holder map.
2360 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
2361 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
2362 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002363}
2364
2365
2366void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002367 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2368 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
2369 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2370 __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
2371 __ Push(r3, r2, r1);
2372
2373 // Pick the right runtime function or stub to call.
2374 int length = instr->hydrogen()->length();
2375 if (instr->hydrogen()->IsCopyOnWrite()) {
2376 ASSERT(instr->hydrogen()->depth() == 1);
2377 FastCloneShallowArrayStub::Mode mode =
2378 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
2379 FastCloneShallowArrayStub stub(mode, length);
2380 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2381 } else if (instr->hydrogen()->depth() > 1) {
2382 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
2383 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
2384 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
2385 } else {
2386 FastCloneShallowArrayStub::Mode mode =
2387 FastCloneShallowArrayStub::CLONE_ELEMENTS;
2388 FastCloneShallowArrayStub stub(mode, length);
2389 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2390 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002391}
2392
2393
2394void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002395 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2396 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
2397 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2398 __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
2399 __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
2400 __ Push(r4, r3, r2, r1);
2401
2402 // Pick the right runtime function to call.
2403 if (instr->hydrogen()->depth() > 1) {
2404 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
2405 } else {
2406 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
2407 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002408}
2409
2410
2411void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002412 Label materialized;
2413 // Registers will be used as follows:
2414 // r3 = JS function.
2415 // r7 = literals array.
2416 // r1 = regexp literal.
2417 // r0 = regexp literal clone.
2418 // r2 and r4-r6 are used as temporaries.
2419 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2420 __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
2421 int literal_offset = FixedArray::kHeaderSize +
2422 instr->hydrogen()->literal_index() * kPointerSize;
2423 __ ldr(r1, FieldMemOperand(r7, literal_offset));
2424 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2425 __ cmp(r1, ip);
2426 __ b(ne, &materialized);
2427
2428 // Create the regexp literal using the runtime function.
2429 // The result will be in r0.
2430 __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2431 __ mov(r5, Operand(instr->hydrogen()->pattern()));
2432 __ mov(r4, Operand(instr->hydrogen()->flags()));
2433 __ Push(r7, r6, r5, r4);
2434 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
2435 __ mov(r1, r0);
2436
2437 __ bind(&materialized);
2438 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
2439 Label allocated, runtime_allocate;
2440
2441 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
2442 __ jmp(&allocated);
2443
2444 __ bind(&runtime_allocate);
2445 __ mov(r0, Operand(Smi::FromInt(size)));
2446 __ Push(r1, r0);
2447 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
2448 __ pop(r1);
2449
2450 __ bind(&allocated);
2451 // Copy the content into the newly allocated memory.
2452 // (Unroll copy loop once for better throughput).
2453 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
2454 __ ldr(r3, FieldMemOperand(r1, i));
2455 __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
2456 __ str(r3, FieldMemOperand(r0, i));
2457 __ str(r2, FieldMemOperand(r0, i + kPointerSize));
2458 }
2459 if ((size % (2 * kPointerSize)) != 0) {
2460 __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
2461 __ str(r3, FieldMemOperand(r0, size - kPointerSize));
2462 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002463}
2464
2465
2466void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002467 // Use the fast case closure allocation code that allocates in new
2468 // space for nested functions that don't need literals cloning.
2469 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
2470 bool pretenure = instr->hydrogen()->pretenure();
2471 if (shared_info->num_literals() == 0 && !pretenure) {
2472 FastNewClosureStub stub;
2473 __ mov(r1, Operand(shared_info));
2474 __ push(r1);
2475 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2476 } else {
2477 __ mov(r2, Operand(shared_info));
2478 __ mov(r1, Operand(pretenure
2479 ? Factory::true_value()
2480 : Factory::false_value()));
2481 __ Push(cp, r2, r1);
2482 CallRuntime(Runtime::kNewClosure, 3, instr);
2483 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002484}
2485
2486
2487void LCodeGen::DoTypeof(LTypeof* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002488 Register input = ToRegister(instr->input());
2489 __ push(input);
2490 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002491}
2492
2493
2494void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002495 Register input = ToRegister(instr->input());
2496 Register result = ToRegister(instr->result());
2497 Label true_label;
2498 Label false_label;
2499 Label done;
2500
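  // EmitTypeofIs computes the branch condition; use it to materialize the
  // true or false value in the result register.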
2501 Condition final_branch_condition = EmitTypeofIs(&true_label,
2502 &false_label,
2503 input,
2504 instr->type_literal());
2505 __ b(final_branch_condition, &true_label);
2506 __ bind(&false_label);
2507 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2508 __ b(&done);
2509
2510 __ bind(&true_label);
2511 __ LoadRoot(result, Heap::kTrueValueRootIndex);
2512
2513 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002514}
2515
2516
2517void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
2518 Register input = ToRegister(instr->input());
2519 int true_block = chunk_->LookupDestination(instr->true_block_id());
2520 int false_block = chunk_->LookupDestination(instr->false_block_id());
2521 Label* true_label = chunk_->GetAssemblyLabel(true_block);
2522 Label* false_label = chunk_->GetAssemblyLabel(false_block);
2523
2524 Condition final_branch_condition = EmitTypeofIs(true_label,
2525 false_label,
2526 input,
2527 instr->type_literal());
2528
2529 EmitBranch(true_block, false_block, final_branch_condition);
2530}
2531
2532
2533Condition LCodeGen::EmitTypeofIs(Label* true_label,
2534 Label* false_label,
2535 Register input,
2536 Handle<String> type_name) {
2537 Condition final_branch_condition = no_condition;
Steve Block9fac8402011-05-12 15:51:54 +01002538 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002539 if (type_name->Equals(Heap::number_symbol())) {
2540 __ tst(input, Operand(kSmiTagMask));
2541 __ b(eq, true_label);
2542 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
2543 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
2544 __ cmp(input, Operand(ip));
2545 final_branch_condition = eq;
2546
2547 } else if (type_name->Equals(Heap::string_symbol())) {
2548 __ tst(input, Operand(kSmiTagMask));
2549 __ b(eq, false_label);
2550 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
2551 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
2552 __ tst(ip, Operand(1 << Map::kIsUndetectable));
2553 __ b(ne, false_label);
Steve Block9fac8402011-05-12 15:51:54 +01002554 __ CompareInstanceType(input, scratch, FIRST_NONSTRING_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002555 final_branch_condition = lo;
2556
2557 } else if (type_name->Equals(Heap::boolean_symbol())) {
2558 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
2559 __ cmp(input, ip);
2560 __ b(eq, true_label);
2561 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
2562 __ cmp(input, ip);
2563 final_branch_condition = eq;
2564
2565 } else if (type_name->Equals(Heap::undefined_symbol())) {
2566 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2567 __ cmp(input, ip);
2568 __ b(eq, true_label);
2569 __ tst(input, Operand(kSmiTagMask));
2570 __ b(eq, false_label);
2571 // Check for undetectable objects => true.
2572 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
2573 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
2574 __ tst(ip, Operand(1 << Map::kIsUndetectable));
2575 final_branch_condition = ne;
2576
2577 } else if (type_name->Equals(Heap::function_symbol())) {
2578 __ tst(input, Operand(kSmiTagMask));
2579 __ b(eq, false_label);
Steve Block9fac8402011-05-12 15:51:54 +01002580 __ CompareObjectType(input, input, scratch, JS_FUNCTION_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002581 __ b(eq, true_label);
2582 // Regular expressions => 'function' (they are callable).
Steve Block9fac8402011-05-12 15:51:54 +01002583 __ CompareInstanceType(input, scratch, JS_REGEXP_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002584 final_branch_condition = eq;
2585
2586 } else if (type_name->Equals(Heap::object_symbol())) {
2587 __ tst(input, Operand(kSmiTagMask));
2588 __ b(eq, false_label);
2589 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2590 __ cmp(input, ip);
2591 __ b(eq, true_label);
2592 // Regular expressions => 'function', not 'object'.
Steve Block9fac8402011-05-12 15:51:54 +01002593 __ CompareObjectType(input, input, scratch, JS_REGEXP_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002594 __ b(eq, false_label);
2595 // Check for undetectable objects => false.
2596 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
2597 __ tst(ip, Operand(1 << Map::kIsUndetectable));
2598 __ b(ne, false_label);
2599 // Check for JS objects => true.
Steve Block9fac8402011-05-12 15:51:54 +01002600 __ CompareInstanceType(input, scratch, FIRST_JS_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002601 __ b(lo, false_label);
Steve Block9fac8402011-05-12 15:51:54 +01002602 __ CompareInstanceType(input, scratch, LAST_JS_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002603 final_branch_condition = ls;
2604
2605 } else {
2606 final_branch_condition = ne;
2607 __ b(false_label);
2608 // A dead branch instruction will be generated after this point.
2609 }
2610
2611 return final_branch_condition;
2612}
2613
2614
2615void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
2616 // No code for lazy bailout instruction. Used to capture environment after a
2617 // call for populating the safepoint data with deoptimization data.
2618}
2619
2620
2621void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
2622 DeoptimizeIf(no_condition, instr->environment());
2623}
2624
2625
2626void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
2627 Abort("DoDeleteProperty unimplemented.");
2628}
2629
2630
2631void LCodeGen::DoStackCheck(LStackCheck* instr) {
2632 // Perform stack overflow check.
2633 Label ok;
2634 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
2635 __ cmp(sp, Operand(ip));
2636 __ b(hs, &ok);
2637 StackCheckStub stub;
2638 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2639 __ bind(&ok);
2640}
2641
2642
2643void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
2644 Abort("DoOsrEntry unimplemented.");
2645}
2646
2647
2648#undef __
2649
2650} } // namespace v8::internal