// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "arm/lithium-codegen-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};
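// Note (illustrative, based on the PostCallGenerator interface): instances of
// this class are presumably handed to the macro assembler's call helpers so
// that Generate() -- and therefore the safepoint -- is recorded for the pc
// immediately following the emitted call instruction.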


class LGapNode: public ZoneObject {
 public:
  explicit LGapNode(LOperand* operand)
      : operand_(operand), resolved_(false), visited_id_(-1) { }

  LOperand* operand() const { return operand_; }
  bool IsResolved() const { return !IsAssigned() || resolved_; }
  void MarkResolved() {
    ASSERT(!IsResolved());
    resolved_ = true;
  }
  int visited_id() const { return visited_id_; }
  void set_visited_id(int id) {
    ASSERT(id > visited_id_);
    visited_id_ = id;
  }

  bool IsAssigned() const { return assigned_from_.is_set(); }
  LGapNode* assigned_from() const { return assigned_from_.get(); }
  void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }

 private:
  LOperand* operand_;
  SetOncePointer<LGapNode> assigned_from_;
  bool resolved_;
  int visited_id_;
};


LGapResolver::LGapResolver()
    : nodes_(32),
      identified_cycles_(4),
      result_(16),
      next_visited_id_(0) {
}


const ZoneList<LMoveOperands>* LGapResolver::Resolve(
    const ZoneList<LMoveOperands>* moves,
    LOperand* marker_operand) {
  nodes_.Rewind(0);
  identified_cycles_.Rewind(0);
  result_.Rewind(0);
  next_visited_id_ = 0;

  for (int i = 0; i < moves->length(); ++i) {
    LMoveOperands move = moves->at(i);
    if (!move.IsRedundant()) RegisterMove(move);
  }

  for (int i = 0; i < identified_cycles_.length(); ++i) {
    ResolveCycle(identified_cycles_[i], marker_operand);
  }

  int unresolved_nodes;
  do {
    unresolved_nodes = 0;
    for (int j = 0; j < nodes_.length(); j++) {
      LGapNode* node = nodes_[j];
      if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
        AddResultMove(node->assigned_from(), node);
        node->MarkResolved();
      }
      if (!node->IsResolved()) ++unresolved_nodes;
    }
  } while (unresolved_nodes > 0);
  return &result_;
}


void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
  AddResultMove(from->operand(), to->operand());
}


void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
  result_.Add(LMoveOperands(from, to));
}


void LGapResolver::ResolveCycle(LGapNode* start, LOperand* marker_operand) {
  ZoneList<LOperand*> cycle_operands(8);
  cycle_operands.Add(marker_operand);
  LGapNode* cur = start;
  do {
    cur->MarkResolved();
    cycle_operands.Add(cur->operand());
    cur = cur->assigned_from();
  } while (cur != start);
  cycle_operands.Add(marker_operand);

  for (int i = cycle_operands.length() - 1; i > 0; --i) {
    LOperand* from = cycle_operands[i];
    LOperand* to = cycle_operands[i - 1];
    AddResultMove(from, to);
  }
}
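// Illustrative example: for the parallel move {r0 -> r1, r1 -> r0} the two
// nodes form a cycle. ResolveCycle() collects [marker, r1, r0, marker] (or
// the r0/r1 mirror image, depending on which node was recorded as the cycle
// start) and appends the moves marker->r0, r0->r1, r1->marker to result_.
// DoParallelMove() later walks result_ backwards, so the emitted sequence is
// effectively: scratch = r1; r1 = r0; r0 = scratch.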


bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
  ASSERT(a != b);
  LGapNode* cur = a;
  while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
    cur->set_visited_id(visited_id);
    cur = cur->assigned_from();
  }

  return cur == b;
}


bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
  ASSERT(a != b);
  return CanReach(a, b, next_visited_id_++);
}


void LGapResolver::RegisterMove(LMoveOperands move) {
  if (move.source()->IsConstantOperand()) {
    // Constant moves should be last in the machine code. Therefore add them
    // first to the result set.
    AddResultMove(move.source(), move.destination());
  } else {
    LGapNode* from = LookupNode(move.source());
    LGapNode* to = LookupNode(move.destination());
    if (to->IsAssigned() && to->assigned_from() == from) {
      move.Eliminate();
      return;
    }
    ASSERT(!to->IsAssigned());
    if (CanReach(from, to)) {
      // This introduces a cycle. Save.
      identified_cycles_.Add(from);
    }
    to->set_assigned_from(from);
  }
}


LGapNode* LGapResolver::LookupNode(LOperand* operand) {
  for (int i = 0; i < nodes_.length(); ++i) {
    if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
  }

  // No node found => create a new one.
  LGapNode* result = new LGapNode(operand);
  nodes_.Add(result);
  return result;
}


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_start(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
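// A sketch of the frame laid out by GeneratePrologue() above (assuming
// kPointerSize == 4):
//
//   fp + 4  : caller's pc (lr)
//   fp + 0  : caller's fp
//   fp - 4  : callee's context (cp)
//   fp - 8  : callee's JS function (r1)
//   fp - 12 : first spill slot (see ToMemOperand() below)
//   ...       StackSlotCount() slots in total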


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Force constant pool emission at the end of deferred code to make
  // sure that no constant pools are emitted after the official end of
  // the instruction sequence.
  masm()->CheckConstPool(true, false);

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
         value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  // TODO(regis): Revisit.
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}
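// Illustrative mapping for the formulas above (assuming kPointerSize == 4):
// spill slot 0 maps to MemOperand(fp, -12), slot 1 to fp - 16, and so on,
// while a hypothetical incoming parameter with index -1 would map to
// MemOperand(fp, +8), i.e. just above the saved fp and return address.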
481
482
Ben Murdochb8e0da22011-05-16 14:20:40 +0100483void LCodeGen::WriteTranslation(LEnvironment* environment,
484 Translation* translation) {
485 if (environment == NULL) return;
486
487 // The translation includes one command per value in the environment.
488 int translation_size = environment->values()->length();
489 // The output frame height does not include the parameters.
490 int height = translation_size - environment->parameter_count();
491
492 WriteTranslation(environment->outer(), translation);
493 int closure_id = DefineDeoptimizationLiteral(environment->closure());
494 translation->BeginFrame(environment->ast_id(), closure_id, height);
495 for (int i = 0; i < translation_size; ++i) {
496 LOperand* value = environment->values()->at(i);
497 // spilled_registers_ and spilled_double_registers_ are either
498 // both NULL or both set.
499 if (environment->spilled_registers() != NULL && value != NULL) {
500 if (value->IsRegister() &&
501 environment->spilled_registers()[value->index()] != NULL) {
502 translation->MarkDuplicate();
503 AddToTranslation(translation,
504 environment->spilled_registers()[value->index()],
505 environment->HasTaggedValueAt(i));
506 } else if (
507 value->IsDoubleRegister() &&
508 environment->spilled_double_registers()[value->index()] != NULL) {
509 translation->MarkDuplicate();
510 AddToTranslation(
511 translation,
512 environment->spilled_double_registers()[value->index()],
513 false);
514 }
515 }
516
517 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
518 }
519}
520
521
Ben Murdochb0fe1622011-05-05 13:52:32 +0100522void LCodeGen::AddToTranslation(Translation* translation,
523 LOperand* op,
524 bool is_tagged) {
525 if (op == NULL) {
526 // TODO(twuerthinger): Introduce marker operands to indicate that this value
527 // is not present and must be reconstructed from the deoptimizer. Currently
528 // this is only used for the arguments object.
529 translation->StoreArgumentsObject();
530 } else if (op->IsStackSlot()) {
531 if (is_tagged) {
532 translation->StoreStackSlot(op->index());
533 } else {
534 translation->StoreInt32StackSlot(op->index());
535 }
536 } else if (op->IsDoubleStackSlot()) {
537 translation->StoreDoubleStackSlot(op->index());
538 } else if (op->IsArgument()) {
539 ASSERT(is_tagged);
540 int src_index = StackSlotCount() + op->index();
541 translation->StoreStackSlot(src_index);
542 } else if (op->IsRegister()) {
543 Register reg = ToRegister(op);
544 if (is_tagged) {
545 translation->StoreRegister(reg);
546 } else {
547 translation->StoreInt32Register(reg);
548 }
549 } else if (op->IsDoubleRegister()) {
550 DoubleRegister reg = ToDoubleRegister(op);
551 translation->StoreDoubleRegister(reg);
552 } else if (op->IsConstantOperand()) {
553 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
554 int src_index = DefineDeoptimizationLiteral(literal);
555 translation->StoreLiteral(src_index);
556 } else {
557 UNREACHABLE();
558 }
559}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  if (instr != NULL) {
    LPointerMap* pointers = instr->pointer_map();
    RecordPosition(pointers->position());
    __ Call(code, mode);
    RegisterLazyDeoptimization(instr);
  } else {
    LPointerMap no_pointers(0);
    RecordPosition(no_pointers.position());
    __ Call(code, mode);
    RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  // Runtime calls to Throw are not supposed to ever return at the
  // call site, so don't register lazy deoptimization for these. We do
  // however have to record a safepoint since throwing exceptions can
  // cause garbage collections.
  if (!instr->IsThrow()) {
    RegisterLazyDeoptimization(instr);
  } else {
    RecordSafepoint(instr->pointer_map(), Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution could
  // continue from a previous bailout point and repeat the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
                                                    deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    }
  }
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint =
      safepoints_.DefineSafepointWithRegisters(
          masm(), arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister()) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  // Register cp always contains a pointer to the context.
  safepoint.DefinePointerRegister(cp);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint =
      safepoints_.DefineSafepointWithRegistersAndDoubles(
          masm(), arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister()) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  // Register cp always contains a pointer to the context.
  safepoint.DefinePointerRegister(cp);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  // d0 must always be a scratch register.
  DoubleRegister dbl_scratch = d0;
  LUnallocated marker_operand(LUnallocated::NONE);

  Register core_scratch = scratch0();
  bool destroys_core_scratch = false;

  const ZoneList<LMoveOperands>* moves =
      resolver_.Resolve(move->move_operands(), &marker_operand);
  for (int i = moves->length() - 1; i >= 0; --i) {
    LMoveOperands move = moves->at(i);
    LOperand* from = move.source();
    LOperand* to = move.destination();
    ASSERT(!from->IsDoubleRegister() ||
           !ToDoubleRegister(from).is(dbl_scratch));
    ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(dbl_scratch));
    ASSERT(!from->IsRegister() || !ToRegister(from).is(core_scratch));
    ASSERT(!to->IsRegister() || !ToRegister(to).is(core_scratch));
    if (from == &marker_operand) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), core_scratch);
        ASSERT(destroys_core_scratch);
      } else if (to->IsStackSlot()) {
        __ str(core_scratch, ToMemOperand(to));
        ASSERT(destroys_core_scratch);
      } else if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), dbl_scratch);
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
      }
    } else if (to == &marker_operand) {
      if (from->IsRegister() || from->IsConstantOperand()) {
        __ mov(core_scratch, ToOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsStackSlot()) {
        __ ldr(core_scratch, ToMemOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsDoubleRegister()) {
        __ vmov(dbl_scratch, ToDoubleRegister(from));
      } else {
        ASSERT(from->IsDoubleStackSlot());
        // TODO(regis): Why is vldr not taking a MemOperand?
        // __ vldr(dbl_scratch, ToMemOperand(from));
        MemOperand from_operand = ToMemOperand(from);
        __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      }
    } else if (from->IsConstantOperand()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ mov(ip, ToOperand(from));
        __ str(ip, ToMemOperand(to));
      }
    } else if (from->IsRegister()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ str(ToRegister(from), ToMemOperand(to));
      }
    } else if (to->IsRegister()) {
      ASSERT(from->IsStackSlot());
      __ ldr(ToRegister(to), ToMemOperand(from));
    } else if (from->IsStackSlot()) {
      ASSERT(to->IsStackSlot());
      __ ldr(ip, ToMemOperand(from));
      __ str(ip, ToMemOperand(to));
    } else if (from->IsDoubleRegister()) {
      if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), ToDoubleRegister(from));
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(ToDoubleRegister(from), to_operand.rn(), to_operand.offset());
      }
    } else if (to->IsDoubleRegister()) {
      ASSERT(from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(ToDoubleRegister(to), ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(ToDoubleRegister(to), from_operand.rn(), from_operand.offset());
    } else {
      ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(dbl_scratch, ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      // TODO(regis): Why is vstr not taking a MemOperand?
      // __ vstr(dbl_scratch, ToMemOperand(to));
      MemOperand to_operand = ToMemOperand(to);
      __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
    }
  }

  if (destroys_core_scratch) {
    __ ldr(core_scratch, MemOperand(fp, -kPointerSize));
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      Abort("StringCharAtStub unimplemented.");
      break;
    }
    case CodeStub::MathPow: {
      Abort("MathPowStub unimplemented.");
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  class DeferredModI: public LDeferredCode {
   public:
    DeferredModI(LCodeGen* codegen, LModI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredGenericBinaryStub(instr_, Token::MOD);
    }
   private:
    LModI* instr_;
  };
  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->left());
  Register right = ToRegister(instr->right());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  Label deoptimize, done;
  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ tst(right, Operand(right));
    __ b(eq, &deoptimize);
  }

  // Check for (0 % -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label ok;
    __ tst(left, Operand(left));
    __ b(ne, &ok);
    __ tst(right, Operand(right));
    __ b(pl, &ok);
    __ b(al, &deoptimize);
    __ bind(&ok);
  }

  // Call the generic stub. The numbers in r0 and r1 have
  // to be tagged as Smis. If that is not possible, deoptimize.
  DeferredModI* deferred = new DeferredModI(this, instr);
  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ BranchOnNotSmi(result, &deoptimize);
  __ mov(result, Operand(result, ASR, 1));

  __ b(al, &done);
  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}


void LCodeGen::DoDivI(LDivI* instr) {
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredGenericBinaryStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->left());
  const Register right = ToRegister(instr->right());
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ tst(right, right);
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ tst(left, Operand(left));
    __ b(ne, &left_not_zero);
    __ tst(right, Operand(right));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the generic stub. The numbers in r0 and r1 have
  // to be tagged as Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ BranchOnNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}


void LCodeGen::DoDeferredGenericBinaryStub(LBinaryOperation* instr,
                                           Token::Value op) {
  Register left = ToRegister(instr->left());
  Register right = ToRegister(instr->right());

  __ PushSafepointRegistersAndDoubles();
  GenericBinaryOpStub stub(op, OVERWRITE_LEFT, left, right);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ str(r0, MemOperand(sp, DwVfpRegister::kNumAllocatableRegisters *
                        kDoubleSize));
  __ PopSafepointRegistersAndDoubles();
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->left());
  Register right = EmitLoadRegister(instr->right(), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->right()->IsConstantOperand()) {
    __ orr(ToRegister(instr->temp()), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(scratch, left, left, right);
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ tst(left, Operand(left));
    __ b(ne, &done);
    if (instr->right()->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->right())) < 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->temp()), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Register right_reg = EmitLoadRegister(right, ip);
  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), Operand(right_reg));
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(scratch, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          __ mov(result, Operand(result, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  Register left = ToRegister(instr->left());
  Register right = EmitLoadRegister(instr->right(), ip);
  ASSERT(instr->left()->Equals(instr->result()));
  __ sub(left, left, right, SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DwVfpRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ vmov(result, v);
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->input());
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->input());
  __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->input());
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->temporary());
  ASSERT(input.is(result));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ b(ne, &done);
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->input();
  ASSERT(input->Equals(instr->result()));
  __ mvn(ToRegister(input), Operand(ToRegister(input)));
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));

  Register right_reg = EmitLoadRegister(right, ip);
  __ add(ToRegister(left), ToRegister(left), Operand(right_reg), SetCC);

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->left());
  DoubleRegister right = ToDoubleRegister(instr->right());
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      Abort("DoArithmeticD unimplemented for MOD.");
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->left()).is(r1));
  ASSERT(ToRegister(instr->right()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // TODO(regis): Implement TypeRecordingBinaryOpStub and replace current
  // GenericBinaryOpStub:
  // TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, r1, r0);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}
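// Fall-through example for EmitBranch() above: if the true target is the next
// block to be emitted, only a single branch on the negated condition to the
// false target is generated (and vice versa); if both targets resolve to the
// same block, the branch degenerates into an unconditional goto.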


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->input());
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, nz);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->input());
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, ne);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->input());
    if (instr->hydrogen()->type().IsBoolean()) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, nz);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform a stack overflow check before jumping if this goto needs it.
    if (deferred_stack_check != NULL) {
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  __ PushSafepointRegisters();
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  __ PopSafepointRegisters();
}


void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToOperand(right));
  Abort("EmitCmpI untested.");
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  Abort("DoCmpID unimplemented.");
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  Abort("DoCmpIDAndBranch unimplemented.");
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->left());
  Register right = ToRegister(instr->right());
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  Abort("DoCmpJSObjectEq untested.");
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Abort("DoCmpJSObjectEqAndBranch unimplemented.");
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->input());
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->input());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}
1688
1689
1690Condition LCodeGen::EmitIsObject(Register input,
1691 Register temp1,
1692 Register temp2,
1693 Label* is_not_object,
1694 Label* is_object) {
1695 Abort("EmitIsObject unimplemented.");
1696 return ne;
1697}
1698
1699
1700void LCodeGen::DoIsObject(LIsObject* instr) {
1701 Abort("DoIsObject unimplemented.");
1702}
1703
1704
1705void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1706 Abort("DoIsObjectAndBranch unimplemented.");
1707}
1708
1709
1710void LCodeGen::DoIsSmi(LIsSmi* instr) {
1711 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1712 Register result = ToRegister(instr->result());
1713 Register input_reg = EmitLoadRegister(instr->input(), ip);
1714 __ tst(input_reg, Operand(kSmiTagMask));
1715 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1716 Label done;
1717 __ b(eq, &done);
1718 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1719 __ bind(&done);
1720}
1721
1722
1723void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1724 int true_block = chunk_->LookupDestination(instr->true_block_id());
1725 int false_block = chunk_->LookupDestination(instr->false_block_id());
1726
1727 Register input_reg = EmitLoadRegister(instr->input(), ip);
1728 __ tst(input_reg, Operand(kSmiTagMask));
1729 EmitBranch(true_block, false_block, eq);
1730}
1731
1732
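// The hydrogen instruction encodes an instance type interval [from, to].
// TestType() picks the single type to compare against; BranchCondition()
// returns the condition that answers the interval test given that comparison.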
1733InstanceType LHasInstanceType::TestType() {
1734 InstanceType from = hydrogen()->from();
1735 InstanceType to = hydrogen()->to();
1736 if (from == FIRST_TYPE) return to;
1737 ASSERT(from == to || to == LAST_TYPE);
1738 return from;
1739}
1740
1741
1742Condition LHasInstanceType::BranchCondition() {
1743 InstanceType from = hydrogen()->from();
1744 InstanceType to = hydrogen()->to();
1745 if (from == to) return eq;
1746 if (to == LAST_TYPE) return hs;
1747 if (from == FIRST_TYPE) return ls;
1748 UNREACHABLE();
1749 return eq;
1750}
1751
1752
1753void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1754 Abort("DoHasInstanceType unimplemented.");
1755}
1756
1757
1758void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001759 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001760 Register input = ToRegister(instr->input());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001761
1762 int true_block = chunk_->LookupDestination(instr->true_block_id());
1763 int false_block = chunk_->LookupDestination(instr->false_block_id());
1764
1765 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1766
1767 __ tst(input, Operand(kSmiTagMask));
1768 __ b(eq, false_label);
1769
Steve Block9fac8402011-05-12 15:51:54 +01001770 __ CompareObjectType(input, scratch, scratch, instr->TestType());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001771 EmitBranch(true_block, false_block, instr->BranchCondition());
1772}
1773
1774
1775void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1776 Abort("DoHasCachedArrayIndex unimplemented.");
1777}
1778
1779
1780void LCodeGen::DoHasCachedArrayIndexAndBranch(
1781 LHasCachedArrayIndexAndBranch* instr) {
1782 Abort("DoHasCachedArrayIndexAndBranch unimplemented.");
1783}
1784
1785
Ben Murdochb8e0da22011-05-16 14:20:40 +01001786// Branches to a label or falls through with the answer in flags. Trashes
Ben Murdochb0fe1622011-05-05 13:52:32 +01001787// the temp registers, but not the input. Only input and temp2 may alias.
1788void LCodeGen::EmitClassOfTest(Label* is_true,
1789 Label* is_false,
1790 Handle<String> class_name,
1791 Register input,
1792 Register temp,
1793 Register temp2) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001794 ASSERT(!input.is(temp));
1795 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1796 __ tst(input, Operand(kSmiTagMask));
1797 __ b(eq, is_false);
1798 __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
1799 __ b(lt, is_false);
1800
1801 // Map is now in temp.
1802 // Functions have class 'Function'.
1803 __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
1804 if (class_name->IsEqualTo(CStrVector("Function"))) {
1805 __ b(eq, is_true);
1806 } else {
1807 __ b(eq, is_false);
1808 }
1809
1810 // Check if the constructor in the map is a function.
1811 __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
1812
1813 // As long as JS_FUNCTION_TYPE is the last instance type and it is
1814 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1815 // LAST_JS_OBJECT_TYPE.
1816 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1817 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1818
1819 // Objects with a non-function constructor have class 'Object'.
1820 __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
1821 if (class_name->IsEqualTo(CStrVector("Object"))) {
1822 __ b(ne, is_true);
1823 } else {
1824 __ b(ne, is_false);
1825 }
1826
1827 // temp now contains the constructor function. Grab the
1828 // instance class name from there.
1829 __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1830 __ ldr(temp, FieldMemOperand(temp,
1831 SharedFunctionInfo::kInstanceClassNameOffset));
1832 // The class name we are testing against is a symbol because it's a literal.
1833 // The name in the constructor is a symbol because of the way the context is
1834 // booted. This routine isn't expected to work for random API-created
1835 // classes and it doesn't have to because you can't access it with natives
1836 // syntax. Since both sides are symbols it is sufficient to use an identity
1837 // comparison.
1838 __ cmp(temp, Operand(class_name));
1839 // End with the answer in flags.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001840}
1841
1842
1843void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001844 Register input = ToRegister(instr->input());
1845 Register result = ToRegister(instr->result());
1846 ASSERT(input.is(result));
1847 Handle<String> class_name = instr->hydrogen()->class_name();
1848
1849 Label done, is_true, is_false;
1850
1851 EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
1852 __ b(ne, &is_false);
1853
1854 __ bind(&is_true);
1855 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1856 __ jmp(&done);
1857
1858 __ bind(&is_false);
1859 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1860 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001861}
1862
1863
1864void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001865 Register input = ToRegister(instr->input());
1866 Register temp = scratch0();
1867 Register temp2 = ToRegister(instr->temporary());
1868 Handle<String> class_name = instr->hydrogen()->class_name();
1869
1870 int true_block = chunk_->LookupDestination(instr->true_block_id());
1871 int false_block = chunk_->LookupDestination(instr->false_block_id());
1872
1873 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1874 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1875
1876 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1877
1878 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001879}
1880
1881
1882void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001883 Register reg = ToRegister(instr->input());
1884 Register temp = ToRegister(instr->temp());
1885 int true_block = instr->true_block_id();
1886 int false_block = instr->false_block_id();
1887
1888 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
1889 __ cmp(temp, Operand(instr->map()));
1890 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001891}
1892
1893
1894void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001895 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
1896 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
1897
Ben Murdochb0fe1622011-05-05 13:52:32 +01001898 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1899 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1900
1901 Label true_value, done;
1902 __ tst(r0, r0);
Steve Block9fac8402011-05-12 15:51:54 +01001903 __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
1904 __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001905}
1906
1907
1908void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1909 Abort("DoInstanceOfAndBranch unimplemented.");
1910}
1911
1912
Ben Murdoch086aeea2011-05-13 15:57:08 +01001913void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1914 Abort("DoInstanceOfKnownGlobal unimplemented.");
1915}
1916
Ben Murdochb0fe1622011-05-05 13:52:32 +01001917
1918static Condition ComputeCompareCondition(Token::Value op) {
1919 switch (op) {
1920 case Token::EQ_STRICT:
1921 case Token::EQ:
1922 return eq;
1923 case Token::LT:
1924 return lt;
1925 case Token::GT:
1926 return gt;
1927 case Token::LTE:
1928 return le;
1929 case Token::GTE:
1930 return ge;
1931 default:
1932 UNREACHABLE();
1933 return no_condition;
1934 }
1935}
1936
1937
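// Generic (tagged) comparison through the CompareIC: the stub's result is
// compared against zero and the matching boolean is conditionally loaded into
// the result register.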
1938void LCodeGen::DoCmpT(LCmpT* instr) {
1939 Token::Value op = instr->op();
1940
1941 Handle<Code> ic = CompareIC::GetUninitialized(op);
1942 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1943
1944 Condition condition = ComputeCompareCondition(op);
1945 if (op == Token::GT || op == Token::LTE) {
1946 condition = ReverseCondition(condition);
1947 }
1948 __ cmp(r0, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001949 __ LoadRoot(ToRegister(instr->result()),
1950 Heap::kTrueValueRootIndex,
1951 condition);
1952 __ LoadRoot(ToRegister(instr->result()),
1953 Heap::kFalseValueRootIndex,
1954 NegateCondition(condition));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001955}
1956
1957
1958void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1959 Abort("DoCmpTAndBranch unimplemented.");
1960}
1961
1962
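// Tears down the frame and returns; sp_delta pops the receiver plus the
// formal parameters pushed by the caller.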
1963void LCodeGen::DoReturn(LReturn* instr) {
1964 if (FLAG_trace) {
1965 // Push the return value on the stack as the parameter.
1966 // Runtime::TraceExit returns its parameter in r0.
1967 __ push(r0);
1968 __ CallRuntime(Runtime::kTraceExit, 1);
1969 }
1970 int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
1971 __ mov(sp, fp);
1972 __ ldm(ia_w, sp, fp.bit() | lr.bit());
1973 __ add(sp, sp, Operand(sp_delta));
1974 __ Jump(lr);
1975}
1976
1977
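// Loads the value straight out of the global property cell. If the cell may
// hold the hole (the property could have been deleted), deoptimize.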
1978void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
1979 Register result = ToRegister(instr->result());
1980 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
1981 __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
1982 if (instr->hydrogen()->check_hole_value()) {
1983 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1984 __ cmp(result, ip);
1985 DeoptimizeIf(eq, instr->environment());
1986 }
1987}
1988
1989
1990void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
1991 Register value = ToRegister(instr->input());
1992 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
1993 __ str(value, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
1994}
1995
1996
Ben Murdochb8e0da22011-05-16 14:20:40 +01001997void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
1998 // TODO(antonm): load a context with a separate instruction.
1999 Register result = ToRegister(instr->result());
2000 __ LoadContext(result, instr->context_chain_length());
2001 __ ldr(result, ContextOperand(result, instr->slot_index()));
2002}
2003
2004
Ben Murdochb0fe1622011-05-05 13:52:32 +01002005void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002006 Register object = ToRegister(instr->input());
2007 Register result = ToRegister(instr->result());
2008 if (instr->hydrogen()->is_in_object()) {
2009 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
2010 } else {
2011 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2012 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
2013 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002014}
2015
2016
2017void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2018 ASSERT(ToRegister(instr->object()).is(r0));
2019 ASSERT(ToRegister(instr->result()).is(r0));
2020
2021 // Name is always in r2.
2022 __ mov(r2, Operand(instr->name()));
2023 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
2024 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2025}
2026
2027
Steve Block9fac8402011-05-12 15:51:54 +01002028void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2029 Register scratch = scratch0();
2030 Register function = ToRegister(instr->function());
2031 Register result = ToRegister(instr->result());
2032
2033 // Check that the function really is a function. Load map into the
2034 // result register.
2035 __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
2036 DeoptimizeIf(ne, instr->environment());
2037
2038 // Make sure that the function has an instance prototype.
2039 Label non_instance;
2040 __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
2041 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
2042 __ b(ne, &non_instance);
2043
2044 // Get the prototype or initial map from the function.
2045 __ ldr(result,
2046 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2047
2048 // Check that the function has a prototype or an initial map.
2049 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2050 __ cmp(result, ip);
2051 DeoptimizeIf(eq, instr->environment());
2052
2053 // If the function does not have an initial map, we're done.
2054 Label done;
2055 __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
2056 __ b(ne, &done);
2057
2058 // Get the prototype from the initial map.
2059 __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
2060 __ jmp(&done);
2061
2062 // Non-instance prototype: Fetch prototype from constructor field
2063 // in initial map.
2064 __ bind(&non_instance);
2065 __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
2066
2067 // All done.
2068 __ bind(&done);
2069}
2070
2071
Ben Murdochb0fe1622011-05-05 13:52:32 +01002072void LCodeGen::DoLoadElements(LLoadElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002073 ASSERT(instr->result()->Equals(instr->input()));
2074 Register reg = ToRegister(instr->input());
2075 Register scratch = scratch0();
2076
2077 __ ldr(reg, FieldMemOperand(reg, JSObject::kElementsOffset));
2078 if (FLAG_debug_code) {
2079 Label done;
2080 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
2081 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2082 __ cmp(scratch, ip);
2083 __ b(eq, &done);
2084 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
2085 __ cmp(scratch, ip);
2086 __ Check(eq, "Check for fast elements failed.");
2087 __ bind(&done);
2088 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002089}
2090
2091
2092void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002093 Register arguments = ToRegister(instr->arguments());
2094 Register length = ToRegister(instr->length());
2095 Register index = ToRegister(instr->index());
2096 Register result = ToRegister(instr->result());
2097
2098 // Bail out if the index is not a valid argument index. The unsigned check
2099 // also gives us the negative-index check for free.
2100 __ sub(length, length, index, SetCC);
2101 DeoptimizeIf(ls, instr->environment());
2102
2103 // There are two words between the frame pointer and the last argument.
2104 // Subtracting from length accounts for one of them; add one more.
2105 __ add(length, length, Operand(1));
2106 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002107}
2108
2109
2110void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002111 Register elements = ToRegister(instr->elements());
2112 Register key = EmitLoadRegister(instr->key(), scratch0());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002113 Register result = ToRegister(instr->result());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002114 Register scratch = scratch0();
Ben Murdochb8e0da22011-05-16 14:20:40 +01002115 ASSERT(result.is(elements));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002116
2117 // Load the result.
2118 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
2119 __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
2120
Ben Murdochb8e0da22011-05-16 14:20:40 +01002121 // Check for the hole value.
2122 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
2123 __ cmp(result, scratch);
2124 DeoptimizeIf(eq, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002125}
2126
2127
2128void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2129 ASSERT(ToRegister(instr->object()).is(r1));
2130 ASSERT(ToRegister(instr->key()).is(r0));
2131
2132 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2133 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2134}
2135
2136
2137void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002138 Register scratch = scratch0();
2139 Register result = ToRegister(instr->result());
2140
2141 // Check if the calling frame is an arguments adaptor frame.
2142 Label done, adapted;
2143 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2144 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
2145 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2146
2147 // Result is the frame pointer for the frame if not adapted and for the real
2148 // frame below the adaptor frame if adapted.
2149 __ mov(result, fp, LeaveCC, ne);
2150 __ mov(result, scratch, LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002151}
2152
2153
2154void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002155 Register elem = ToRegister(instr->input());
2156 Register result = ToRegister(instr->result());
2157
2158 Label done;
2159
2160 // If no arguments adaptor frame the number of arguments is fixed.
2161 __ cmp(fp, elem);
2162 __ mov(result, Operand(scope()->num_parameters()));
2163 __ b(eq, &done);
2164
2165 // Arguments adaptor frame present. Get argument length from there.
2166 __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2167 __ ldr(result,
2168 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
2169 __ SmiUntag(result);
2170
2171 // Argument length is in result register.
2172 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002173}
2174
2175
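// Typically emitted for Function.prototype.apply with an arguments object,
// e.g. f.apply(receiver, arguments): a null or undefined receiver is replaced
// by the global object, at most 1K arguments are copied from the frame onto
// the stack, and the function is invoked with the argument count in r0.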
2176void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002177 Register receiver = ToRegister(instr->receiver());
2178 Register function = ToRegister(instr->function());
2179 Register scratch = scratch0();
2180
2181 ASSERT(receiver.is(r0));
2182 ASSERT(function.is(r1));
2183 ASSERT(ToRegister(instr->result()).is(r0));
2184
2185 // If the receiver is null or undefined, we have to pass the
2186 // global object as a receiver.
2187 Label global_receiver, receiver_ok;
2188 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
2189 __ cmp(receiver, scratch);
2190 __ b(eq, &global_receiver);
2191 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
2192 __ cmp(receiver, scratch);
2193 __ b(ne, &receiver_ok);
2194 __ bind(&global_receiver);
2195 __ ldr(receiver, GlobalObjectOperand());
2196 __ bind(&receiver_ok);
2197
2198 Register length = ToRegister(instr->length());
2199 Register elements = ToRegister(instr->elements());
2200
2201 Label invoke;
2202
2203 // Copy the arguments to this function possibly from the
2204 // adaptor frame below it.
2205 const uint32_t kArgumentsLimit = 1 * KB;
2206 __ cmp(length, Operand(kArgumentsLimit));
2207 DeoptimizeIf(hi, instr->environment());
2208
2209 // Push the receiver and use the register to keep the original
2210 // number of arguments.
2211 __ push(receiver);
2212 __ mov(receiver, length);
2213 // The arguments are at a one pointer size offset from elements.
2214 __ add(elements, elements, Operand(1 * kPointerSize));
2215
2216 // Loop through the arguments pushing them onto the execution
2217 // stack.
2218 Label loop;
2219 // length is a small non-negative integer, due to the test above.
2220 __ tst(length, Operand(length));
2221 __ b(eq, &invoke);
2222 __ bind(&loop);
2223 __ ldr(scratch, MemOperand(elements, length, LSL, 2));
2224 __ push(scratch);
2225 __ sub(length, length, Operand(1), SetCC);
2226 __ b(ne, &loop);
2227
2228 __ bind(&invoke);
2229 // Invoke the function. The number of arguments is stored in receiver
2230 // which is r0, as expected by InvokeFunction.
2231 v8::internal::ParameterCount actual(receiver);
2232 SafepointGenerator safepoint_generator(this,
2233 instr->pointer_map(),
2234 Safepoint::kNoDeoptimizationIndex);
2235 __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002236}
2237
2238
2239void LCodeGen::DoPushArgument(LPushArgument* instr) {
2240 LOperand* argument = instr->input();
2241 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2242 Abort("DoPushArgument not implemented for double type.");
2243 } else {
2244 Register argument_reg = EmitLoadRegister(argument, ip);
2245 __ push(argument_reg);
2246 }
2247}
2248
2249
2250void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2251 Register result = ToRegister(instr->result());
2252 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2253}
2254
2255
2256void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2257 Register result = ToRegister(instr->result());
2258 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2259 __ ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset));
2260}
2261
2262
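// Calls a function whose JSFunction object the caller has placed in r1. The
// context is switched only when the callee may run in a different context,
// and r0 is primed with the arity when no arguments adaption is needed.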
2263void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2264 int arity,
2265 LInstruction* instr) {
2266 // Change context if needed.
2267 bool change_context =
2268 (graph()->info()->closure()->context() != function->context()) ||
2269 scope()->contains_with() ||
2270 (scope()->num_heap_slots() > 0);
2271 if (change_context) {
2272 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2273 }
2274
2275 // Set r0 to arguments count if adaption is not needed. Assumes that r0
2276 // is available to write to at this point.
2277 if (!function->NeedsArgumentsAdaption()) {
2278 __ mov(r0, Operand(arity));
2279 }
2280
2281 LPointerMap* pointers = instr->pointer_map();
2282 RecordPosition(pointers->position());
2283
2284 // Invoke function.
2285 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2286 __ Call(ip);
2287
2288 // Set up deoptimization.
2289 RegisterLazyDeoptimization(instr);
2290
2291 // Restore context.
2292 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2293}
2294
2295
2296void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002297 ASSERT(ToRegister(instr->result()).is(r0));
2298 __ mov(r1, Operand(instr->function()));
2299 CallKnownFunction(instr->function(), instr->arity(), instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002300}
2301
2302
2303void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2304 Abort("DoDeferredMathAbsTaggedHeapNumber unimplemented.");
2305}
2306
2307
2308void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2309 Abort("DoMathAbs unimplemented.");
2310}
2311
2312
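// Implements floor by converting to int32 with the VFP rounding mode set to
// round-towards-minus-infinity. Deoptimizes on VFP exceptions (NaN, overflow)
// and on a -0 result when the input's sign bit is set.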
2313void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002314 DoubleRegister input = ToDoubleRegister(instr->input());
2315 Register result = ToRegister(instr->result());
2316 Register prev_fpscr = ToRegister(instr->temp());
2317 SwVfpRegister single_scratch = double_scratch0().low();
2318 Register scratch = scratch0();
2319
2320 // Set custom FPCSR:
2321 // - Set rounding mode to "Round towards Minus Infinity".
2322 // - Clear vfp cumulative exception flags.
2323 // - Make sure Flush-to-zero mode control bit is unset.
2324 __ vmrs(prev_fpscr);
2325 __ bic(scratch, prev_fpscr,
2326 Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
2327 __ orr(scratch, scratch, Operand(kVFPRoundToMinusInfinityBits));
2328 __ vmsr(scratch);
2329
2330 // Convert the argument to an integer.
2331 __ vcvt_s32_f64(single_scratch,
2332 input,
2333 Assembler::FPSCRRounding,
2334 al);
2335
2336 // Retrieve FPSCR and check for vfp exceptions.
2337 __ vmrs(scratch);
2338 // Restore FPSCR
2339 __ vmsr(prev_fpscr);
2340 __ tst(scratch, Operand(kVFPExceptionMask));
2341 DeoptimizeIf(ne, instr->environment());
2342
2343 // Move the result back to general purpose register r0.
2344 __ vmov(result, single_scratch);
2345
2346 // Test for -0.
2347 Label done;
2348 __ cmp(result, Operand(0));
2349 __ b(ne, &done);
2350 __ vmov(scratch, input.high());
2351 __ tst(scratch, Operand(HeapNumber::kSignMask));
2352 DeoptimizeIf(ne, instr->environment());
2353 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002354}
2355
2356
2357void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002358 DoubleRegister input = ToDoubleRegister(instr->input());
2359 ASSERT(ToDoubleRegister(instr->result()).is(input));
2360 __ vsqrt(input, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002361}
2362
2363
2364void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2365 switch (instr->op()) {
2366 case kMathAbs:
2367 DoMathAbs(instr);
2368 break;
2369 case kMathFloor:
2370 DoMathFloor(instr);
2371 break;
2372 case kMathSqrt:
2373 DoMathSqrt(instr);
2374 break;
2375 default:
2376 Abort("Unimplemented type of LUnaryMathOperation.");
2377 UNREACHABLE();
2378 }
2379}
2380
2381
2382void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002383 ASSERT(ToRegister(instr->result()).is(r0));
2384
2385 int arity = instr->arity();
2386 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2387 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2388 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002389}
2390
2391
2392void LCodeGen::DoCallNamed(LCallNamed* instr) {
2393 ASSERT(ToRegister(instr->result()).is(r0));
2394
2395 int arity = instr->arity();
2396 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2397 __ mov(r2, Operand(instr->name()));
2398 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2399 // Restore context register.
2400 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2401}
2402
2403
2404void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002405 ASSERT(ToRegister(instr->result()).is(r0));
2406
2407 int arity = instr->arity();
2408 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2409 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2410 __ Drop(1);
2411 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002412}
2413
2414
2415void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002416 ASSERT(ToRegister(instr->result()).is(r0));
2417
2418 int arity = instr->arity();
2419 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2420 __ mov(r2, Operand(instr->name()));
2421 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2422 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002423}
2424
2425
2426void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2427 ASSERT(ToRegister(instr->result()).is(r0));
2428 __ mov(r1, Operand(instr->target()));
2429 CallKnownFunction(instr->target(), instr->arity(), instr);
2430}
2431
2432
2433void LCodeGen::DoCallNew(LCallNew* instr) {
2434 ASSERT(ToRegister(instr->input()).is(r1));
2435 ASSERT(ToRegister(instr->result()).is(r0));
2436
2437 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2438 __ mov(r0, Operand(instr->arity()));
2439 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2440}
2441
2442
2443void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2444 CallRuntime(instr->function(), instr->arity(), instr);
2445}
2446
2447
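// Stores a named field either in-object or in the properties array, writing
// the transition map first when the store changes the object's map and
// emitting a write barrier when one is needed.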
2448void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002449 Register object = ToRegister(instr->object());
2450 Register value = ToRegister(instr->value());
2451 Register scratch = scratch0();
2452 int offset = instr->offset();
2453
2454 ASSERT(!object.is(value));
2455
2456 if (!instr->transition().is_null()) {
2457 __ mov(scratch, Operand(instr->transition()));
2458 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
2459 }
2460
2461 // Do the store.
2462 if (instr->is_in_object()) {
2463 __ str(value, FieldMemOperand(object, offset));
2464 if (instr->needs_write_barrier()) {
2465 // Update the write barrier for the object for in-object properties.
2466 __ RecordWrite(object, Operand(offset), value, scratch);
2467 }
2468 } else {
2469 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
2470 __ str(value, FieldMemOperand(scratch, offset));
2471 if (instr->needs_write_barrier()) {
2472 // Update the write barrier for the properties array.
2473 // object is used as a scratch register.
2474 __ RecordWrite(scratch, Operand(offset), value, object);
2475 }
2476 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002477}
2478
2479
2480void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2481 ASSERT(ToRegister(instr->object()).is(r1));
2482 ASSERT(ToRegister(instr->value()).is(r0));
2483
2484 // Name is always in r2.
2485 __ mov(r2, Operand(instr->name()));
2486 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
2487 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2488}
2489
2490
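// Deoptimizes when index >= length; the unsigned 'hs' condition also catches
// negative indices.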
2491void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002492 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
Steve Block9fac8402011-05-12 15:51:54 +01002493 DeoptimizeIf(hs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002494}
2495
2496
2497void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002498 Register value = ToRegister(instr->value());
2499 Register elements = ToRegister(instr->object());
2500 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
2501 Register scratch = scratch0();
2502
2503 // Do the store.
2504 if (instr->key()->IsConstantOperand()) {
2505 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
2506 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
2507 int offset =
2508 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
2509 __ str(value, FieldMemOperand(elements, offset));
2510 } else {
2511 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
2512 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
2513 }
2514
2515 if (instr->hydrogen()->NeedsWriteBarrier()) {
2516 // Compute address of modified element and store it into key register.
2517 __ add(key, scratch, Operand(FixedArray::kHeaderSize));
2518 __ RecordWrite(elements, key, value);
2519 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002520}
2521
2522
2523void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2524 ASSERT(ToRegister(instr->object()).is(r2));
2525 ASSERT(ToRegister(instr->key()).is(r1));
2526 ASSERT(ToRegister(instr->value()).is(r0));
2527
2528 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
2529 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2530}
2531
2532
2533void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002534 LOperand* input = instr->input();
2535 ASSERT(input->IsRegister() || input->IsStackSlot());
2536 LOperand* output = instr->result();
2537 ASSERT(output->IsDoubleRegister());
2538 SwVfpRegister single_scratch = double_scratch0().low();
2539 if (input->IsStackSlot()) {
2540 Register scratch = scratch0();
2541 __ ldr(scratch, ToMemOperand(input));
2542 __ vmov(single_scratch, scratch);
2543 } else {
2544 __ vmov(single_scratch, ToRegister(input));
2545 }
2546 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002547}
2548
2549
2550void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
2551 class DeferredNumberTagI: public LDeferredCode {
2552 public:
2553 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
2554 : LDeferredCode(codegen), instr_(instr) { }
2555 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
2556 private:
2557 LNumberTagI* instr_;
2558 };
2559
2560 LOperand* input = instr->input();
2561 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2562 Register reg = ToRegister(input);
2563
2564 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
2565 __ SmiTag(reg, SetCC);
2566 __ b(vs, deferred->entry());
2567 __ bind(deferred->exit());
2568}
2569
2570
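// Slow path for NumberTagI: the smi tag overflowed, so box the original int32
// in a heap number, trying inline allocation first and falling back to the
// runtime, with all registers preserved via the safepoint registers.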
2571void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
2572 Label slow;
2573 Register reg = ToRegister(instr->input());
2574 DoubleRegister dbl_scratch = d0;
2575 SwVfpRegister flt_scratch = s0;
2576
2577 // Preserve the value of all registers.
2578 __ PushSafepointRegisters();
2579
2580 // There was overflow, so bits 30 and 31 of the original integer
2581 // disagree. Try to allocate a heap number in new space and store
2582 // the value in there. If that fails, call the runtime system.
2583 Label done;
2584 __ SmiUntag(reg);
2585 __ eor(reg, reg, Operand(0x80000000));
2586 __ vmov(flt_scratch, reg);
2587 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
2588 if (FLAG_inline_new) {
2589 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2590 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
2591 if (!reg.is(r5)) __ mov(reg, r5);
2592 __ b(&done);
2593 }
2594
2595 // Slow case: Call the runtime system to do the number allocation.
2596 __ bind(&slow);
2597
2598 // TODO(3095996): Put a valid pointer value in the stack slot where the result
2599 // register is stored, as this register is in the pointer map, but contains an
2600 // integer value.
2601 __ mov(ip, Operand(0));
2602 int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
2603 __ str(ip, MemOperand(sp, reg_stack_index * kPointerSize));
2604
2605 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2606 RecordSafepointWithRegisters(
2607 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2608 if (!reg.is(r0)) __ mov(reg, r0);
2609
2610 // Done. Store the value in dbl_scratch into the value field of the
2611 // allocated heap number.
2612 __ bind(&done);
2613 __ sub(ip, reg, Operand(kHeapObjectTag));
2614 __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
2615 __ str(reg, MemOperand(sp, reg_stack_index * kPointerSize));
2616 __ PopSafepointRegisters();
2617}
2618
2619
2620void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
2621 class DeferredNumberTagD: public LDeferredCode {
2622 public:
2623 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
2624 : LDeferredCode(codegen), instr_(instr) { }
2625 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
2626 private:
2627 LNumberTagD* instr_;
2628 };
2629
2630 DoubleRegister input_reg = ToDoubleRegister(instr->input());
Steve Block9fac8402011-05-12 15:51:54 +01002631 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002632 Register reg = ToRegister(instr->result());
2633 Register temp1 = ToRegister(instr->temp1());
2634 Register temp2 = ToRegister(instr->temp2());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002635
2636 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
2637 if (FLAG_inline_new) {
2638 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
2639 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
2640 } else {
2641 __ jmp(deferred->entry());
2642 }
2643 __ bind(deferred->exit());
2644 __ sub(ip, reg, Operand(kHeapObjectTag));
2645 __ vstr(input_reg, ip, HeapNumber::kValueOffset);
2646}
2647
2648
2649void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2650 // TODO(3095996): Get rid of this. For now, we need to make the
2651 // result register contain a valid pointer because it is already
2652 // contained in the register pointer map.
2653 Register reg = ToRegister(instr->result());
2654 __ mov(reg, Operand(0));
2655
2656 __ PushSafepointRegisters();
2657 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2658 RecordSafepointWithRegisters(
2659 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2660 int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
2661 __ str(r0, MemOperand(sp, reg_stack_index * kPointerSize));
2662 __ PopSafepointRegisters();
2663}
2664
2665
2666void LCodeGen::DoSmiTag(LSmiTag* instr) {
2667 LOperand* input = instr->input();
2668 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2669 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
2670 __ SmiTag(ToRegister(input));
2671}
2672
2673
2674void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002675 LOperand* input = instr->input();
2676 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2677 if (instr->needs_check()) {
2678 __ tst(ToRegister(input), Operand(kSmiTagMask));
2679 DeoptimizeIf(ne, instr->environment());
2680 }
2681 __ SmiUntag(ToRegister(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002682}
2683
2684
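// Converts a tagged value to a double: smis are untagged and converted, heap
// numbers are loaded directly, undefined becomes NaN, and anything else
// deoptimizes.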
2685void LCodeGen::EmitNumberUntagD(Register input_reg,
2686 DoubleRegister result_reg,
2687 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01002688 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002689 SwVfpRegister flt_scratch = s0;
2690 ASSERT(!result_reg.is(d0));
2691
2692 Label load_smi, heap_number, done;
2693
2694 // Smi check.
2695 __ tst(input_reg, Operand(kSmiTagMask));
2696 __ b(eq, &load_smi);
2697
2698 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01002699 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002700 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01002701 __ cmp(scratch, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002702 __ b(eq, &heap_number);
2703
2704 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2705 __ cmp(input_reg, Operand(ip));
2706 DeoptimizeIf(ne, env);
2707
2708 // Convert undefined to NaN.
2709 __ LoadRoot(ip, Heap::kNanValueRootIndex);
2710 __ sub(ip, ip, Operand(kHeapObjectTag));
2711 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
2712 __ jmp(&done);
2713
2714 // Heap number to double register conversion.
2715 __ bind(&heap_number);
2716 __ sub(ip, input_reg, Operand(kHeapObjectTag));
2717 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
2718 __ jmp(&done);
2719
2720 // Smi to double register conversion
2721 __ bind(&load_smi);
2722 __ SmiUntag(input_reg); // Untag smi before converting to float.
2723 __ vmov(flt_scratch, input_reg);
2724 __ vcvt_f64_s32(result_reg, flt_scratch);
2725 __ SmiTag(input_reg); // Retag smi.
2726 __ bind(&done);
2727}
2728
2729
2730class DeferredTaggedToI: public LDeferredCode {
2731 public:
2732 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
2733 : LDeferredCode(codegen), instr_(instr) { }
2734 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
2735 private:
2736 LTaggedToI* instr_;
2737};
2738
2739
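// Slow path for TaggedToI when the input is not a smi. Truncating conversions
// map undefined to zero and deoptimize on NaN or out-of-range values;
// non-truncating conversions accept only heap numbers whose value converts
// back exactly (and, if requested, bail out on -0).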
2740void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
2741 Label done;
2742 Register input_reg = ToRegister(instr->input());
Steve Block9fac8402011-05-12 15:51:54 +01002743 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002744 DoubleRegister dbl_scratch = d0;
2745 SwVfpRegister flt_scratch = s0;
2746 DoubleRegister dbl_tmp = ToDoubleRegister(instr->temp());
2747
2748 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01002749 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002750 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01002751 __ cmp(scratch, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002752
2753 if (instr->truncating()) {
2754 Label heap_number;
2755 __ b(eq, &heap_number);
2756 // Check for undefined. Undefined is converted to zero for truncating
2757 // conversions.
2758 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2759 __ cmp(input_reg, Operand(ip));
2760 DeoptimizeIf(ne, instr->environment());
2761 __ mov(input_reg, Operand(0));
2762 __ b(&done);
2763
2764 __ bind(&heap_number);
2765 __ sub(ip, input_reg, Operand(kHeapObjectTag));
2766 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002767 __ vcmp(dbl_tmp, 0.0); // Sets overflow bit in FPSCR flags if NaN.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002768 __ vcvt_s32_f64(flt_scratch, dbl_tmp);
2769 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
2770 __ vmrs(pc); // Move vector status bits to normal status bits.
2771 // Overflow bit is set if dbl_tmp is NaN.
2772 __ cmn(input_reg, Operand(1), vc); // 0x7fffffff + 1 -> overflow.
2773 __ cmp(input_reg, Operand(1), vc); // 0x80000000 - 1 -> overflow.
2774 DeoptimizeIf(vs, instr->environment()); // Saturation may have occurred.
2775
2776 } else {
2777 // Deoptimize if we don't have a heap number.
2778 DeoptimizeIf(ne, instr->environment());
2779
2780 __ sub(ip, input_reg, Operand(kHeapObjectTag));
2781 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
2782 __ vcvt_s32_f64(flt_scratch, dbl_tmp);
2783 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
2784 // Non-truncating conversion means that we cannot lose bits, so we convert
2785 // back to check; note that using non-overlapping s and d regs would be
2786 // slightly faster.
2787 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002788 __ VFPCompareAndSetFlags(dbl_scratch, dbl_tmp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002789 DeoptimizeIf(ne, instr->environment()); // Not equal or unordered.
2790 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2791 __ tst(input_reg, Operand(input_reg));
2792 __ b(ne, &done);
2793 __ vmov(lr, ip, dbl_tmp);
2794 __ tst(ip, Operand(1 << 31)); // Test sign bit.
2795 DeoptimizeIf(ne, instr->environment());
2796 }
2797 }
2798 __ bind(&done);
2799}
2800
2801
2802void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
2803 LOperand* input = instr->input();
2804 ASSERT(input->IsRegister());
2805 ASSERT(input->Equals(instr->result()));
2806
2807 Register input_reg = ToRegister(input);
2808
2809 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
2810
2811 // Smi check.
2812 __ tst(input_reg, Operand(kSmiTagMask));
2813 __ b(ne, deferred->entry());
2814
2815 // Smi to int32 conversion
2816 __ SmiUntag(input_reg); // Untag smi.
2817
2818 __ bind(deferred->exit());
2819}
2820
2821
2822void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
2823 LOperand* input = instr->input();
2824 ASSERT(input->IsRegister());
2825 LOperand* result = instr->result();
2826 ASSERT(result->IsDoubleRegister());
2827
2828 Register input_reg = ToRegister(input);
2829 DoubleRegister result_reg = ToDoubleRegister(result);
2830
2831 EmitNumberUntagD(input_reg, result_reg, instr->environment());
2832}
2833
2834
2835void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
2836 Abort("DoDoubleToI unimplemented.");
2837}
2838
2839
2840void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
2841 LOperand* input = instr->input();
2842 ASSERT(input->IsRegister());
2843 __ tst(ToRegister(input), Operand(kSmiTagMask));
2844 DeoptimizeIf(instr->condition(), instr->environment());
2845}
2846
2847
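// Deoptimizes unless the object's instance type lies in the inclusive range
// [first, last].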
2848void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002849 Register input = ToRegister(instr->input());
2850 Register scratch = scratch0();
2851 InstanceType first = instr->hydrogen()->first();
2852 InstanceType last = instr->hydrogen()->last();
2853
2854 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2855 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2856 __ cmp(scratch, Operand(first));
2857
2858 // If there is only one type in the interval, check for equality.
2859 if (first == last) {
2860 DeoptimizeIf(ne, instr->environment());
2861 } else {
2862 DeoptimizeIf(lo, instr->environment());
2863 // Omit check for the last type.
2864 if (last != LAST_TYPE) {
2865 __ cmp(scratch, Operand(last));
2866 DeoptimizeIf(hi, instr->environment());
2867 }
2868 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002869}
2870
2871
2872void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
2873 ASSERT(instr->input()->IsRegister());
2874 Register reg = ToRegister(instr->input());
2875 __ cmp(reg, Operand(instr->hydrogen()->target()));
2876 DeoptimizeIf(ne, instr->environment());
2877}
2878
2879
2880void LCodeGen::DoCheckMap(LCheckMap* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002881 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002882 LOperand* input = instr->input();
2883 ASSERT(input->IsRegister());
2884 Register reg = ToRegister(input);
Steve Block9fac8402011-05-12 15:51:54 +01002885 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
2886 __ cmp(scratch, Operand(instr->hydrogen()->map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002887 DeoptimizeIf(ne, instr->environment());
2888}
2889
2890
Ben Murdochb8e0da22011-05-16 14:20:40 +01002891void LCodeGen::LoadHeapObject(Register result,
2892 Handle<HeapObject> object) {
2893 if (Heap::InNewSpace(*object)) {
Steve Block9fac8402011-05-12 15:51:54 +01002894 Handle<JSGlobalPropertyCell> cell =
Ben Murdochb8e0da22011-05-16 14:20:40 +01002895 Factory::NewJSGlobalPropertyCell(object);
Steve Block9fac8402011-05-12 15:51:54 +01002896 __ mov(result, Operand(cell));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002897 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
Steve Block9fac8402011-05-12 15:51:54 +01002898 } else {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002899 __ mov(result, Operand(object));
Steve Block9fac8402011-05-12 15:51:54 +01002900 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002901}
2902
2903
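// Walks the prototype chain from instr->prototype() up to the holder,
// deoptimizing if any map along the way (including the holder's) has changed.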
2904void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002905 Register temp1 = ToRegister(instr->temp1());
2906 Register temp2 = ToRegister(instr->temp2());
2907
2908 Handle<JSObject> holder = instr->holder();
Ben Murdochb8e0da22011-05-16 14:20:40 +01002909 Handle<JSObject> current_prototype = instr->prototype();
Steve Block9fac8402011-05-12 15:51:54 +01002910
2911 // Load prototype object.
Ben Murdochb8e0da22011-05-16 14:20:40 +01002912 LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01002913
2914 // Check prototype maps up to the holder.
2915 while (!current_prototype.is_identical_to(holder)) {
2916 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
2917 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
2918 DeoptimizeIf(ne, instr->environment());
2919 current_prototype =
2920 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
2921 // Load next prototype object.
Ben Murdochb8e0da22011-05-16 14:20:40 +01002922 LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01002923 }
2924
2925 // Check the holder map.
2926 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
2927 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
2928 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002929}
2930
2931
2932void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002933 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2934 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
2935 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2936 __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
2937 __ Push(r3, r2, r1);
2938
2939 // Pick the right runtime function or stub to call.
2940 int length = instr->hydrogen()->length();
2941 if (instr->hydrogen()->IsCopyOnWrite()) {
2942 ASSERT(instr->hydrogen()->depth() == 1);
2943 FastCloneShallowArrayStub::Mode mode =
2944 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
2945 FastCloneShallowArrayStub stub(mode, length);
2946 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2947 } else if (instr->hydrogen()->depth() > 1) {
2948 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
2949 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
2950 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
2951 } else {
2952 FastCloneShallowArrayStub::Mode mode =
2953 FastCloneShallowArrayStub::CLONE_ELEMENTS;
2954 FastCloneShallowArrayStub stub(mode, length);
2955 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2956 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002957}
2958
2959
2960void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002961 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2962 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
2963 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2964 __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
2965 __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
2966 __ Push(r4, r3, r2, r1);
2967
2968 // Pick the right runtime function to call.
2969 if (instr->hydrogen()->depth() > 1) {
2970 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
2971 } else {
2972 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
2973 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002974}
2975
2976
2977void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002978 Label materialized;
2979 // Registers will be used as follows:
2980 // r3 = JS function.
2981 // r7 = literals array.
2982 // r1 = regexp literal.
2983 // r0 = regexp literal clone.
2984 // r2 and r4-r6 are used as temporaries.
2985 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2986 __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
2987 int literal_offset = FixedArray::kHeaderSize +
2988 instr->hydrogen()->literal_index() * kPointerSize;
2989 __ ldr(r1, FieldMemOperand(r7, literal_offset));
2990 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2991 __ cmp(r1, ip);
2992 __ b(ne, &materialized);
2993
2994 // Create regexp literal using runtime function
2995 // Result will be in r0.
2996 __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
2997 __ mov(r5, Operand(instr->hydrogen()->pattern()));
2998 __ mov(r4, Operand(instr->hydrogen()->flags()));
2999 __ Push(r7, r6, r5, r4);
3000 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3001 __ mov(r1, r0);
3002
3003 __ bind(&materialized);
3004 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3005 Label allocated, runtime_allocate;
3006
3007 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
3008 __ jmp(&allocated);
3009
3010 __ bind(&runtime_allocate);
3011 __ mov(r0, Operand(Smi::FromInt(size)));
3012 __ Push(r1, r0);
3013 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3014 __ pop(r1);
3015
3016 __ bind(&allocated);
3017 // Copy the content into the newly allocated memory.
3018 // (Unroll copy loop once for better throughput).
3019 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3020 __ ldr(r3, FieldMemOperand(r1, i));
3021 __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
3022 __ str(r3, FieldMemOperand(r0, i));
3023 __ str(r2, FieldMemOperand(r0, i + kPointerSize));
3024 }
3025 if ((size % (2 * kPointerSize)) != 0) {
3026 __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
3027 __ str(r3, FieldMemOperand(r0, size - kPointerSize));
3028 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003029}
3030
3031
3032void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003033 // Use the fast case closure allocation code that allocates in new
3034 // space for nested functions that don't need literals cloning.
3035 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3036 bool pretenure = instr->hydrogen()->pretenure();
3037 if (shared_info->num_literals() == 0 && !pretenure) {
3038 FastNewClosureStub stub;
3039 __ mov(r1, Operand(shared_info));
3040 __ push(r1);
3041 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3042 } else {
3043 __ mov(r2, Operand(shared_info));
3044 __ mov(r1, Operand(pretenure
3045 ? Factory::true_value()
3046 : Factory::false_value()));
3047 __ Push(cp, r2, r1);
3048 CallRuntime(Runtime::kNewClosure, 3, instr);
3049 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003050}
3051
3052
3053void LCodeGen::DoTypeof(LTypeof* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003054 Register input = ToRegister(instr->input());
3055 __ push(input);
3056 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003057}
3058
3059
3060void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003061 Register input = ToRegister(instr->input());
3062 Register result = ToRegister(instr->result());
3063 Label true_label;
3064 Label false_label;
3065 Label done;
3066
3067 Condition final_branch_condition = EmitTypeofIs(&true_label,
3068 &false_label,
3069 input,
3070 instr->type_literal());
3071 __ b(final_branch_condition, &true_label);
3072 __ bind(&false_label);
3073 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3074 __ b(&done);
3075
3076 __ bind(&true_label);
3077 __ LoadRoot(result, Heap::kTrueValueRootIndex);
3078
3079 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003080}
3081
3082
3083void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
3084 Register input = ToRegister(instr->input());
3085 int true_block = chunk_->LookupDestination(instr->true_block_id());
3086 int false_block = chunk_->LookupDestination(instr->false_block_id());
3087 Label* true_label = chunk_->GetAssemblyLabel(true_block);
3088 Label* false_label = chunk_->GetAssemblyLabel(false_block);
3089
3090 Condition final_branch_condition = EmitTypeofIs(true_label,
3091 false_label,
3092 input,
3093 instr->type_literal());
3094
3095 EmitBranch(true_block, false_block, final_branch_condition);
3096}
3097
3098
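// Emits the test for 'typeof input == type_name'. Branches to true_label or
// false_label where the answer is already decided, otherwise returns the
// condition to branch on. Clobbers the input register.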
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  Register scratch = scratch0();
  if (type_name->Equals(Heap::number_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::string_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    __ CompareInstanceType(input, scratch, FIRST_NONSTRING_TYPE);
    final_branch_condition = lo;

  } else if (type_name->Equals(Heap::boolean_symbol())) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(input, ip);
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::undefined_symbol())) {
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(Heap::function_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ CompareObjectType(input, input, scratch, JS_FUNCTION_TYPE);
    __ b(eq, true_label);
    // Regular expressions => 'function' (they are callable).
    __ CompareInstanceType(input, scratch, JS_REGEXP_TYPE);
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::object_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ LoadRoot(ip, Heap::kNullValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    // Regular expressions => 'function', not 'object'.
    __ CompareObjectType(input, input, scratch, JS_REGEXP_TYPE);
    __ b(eq, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    // Check for JS objects => true.
    __ CompareInstanceType(input, scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, scratch, LAST_JS_OBJECT_TYPE);
    final_branch_condition = ls;

  } else {
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}


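// There is no fast case for deleting a property: object and key are pushed
// and the DELETE builtin is invoked; the SafepointGenerator records the
// safepoint needed for the builtin call.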
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(object, key);
  RecordPosition(instr->pointer_map()->position());
  SafepointGenerator safepoint_generator(this,
                                         instr->pointer_map(),
                                         Safepoint::kNoDeoptimizationIndex);
  __ InvokeBuiltin(Builtins::DELETE, CALL_JS, &safepoint_generator);
}


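// Compare sp against the current stack limit from the root list and call the
// StackCheckStub only when the limit has been reached (the limit is also
// lowered by V8 to request interruption of running code).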
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}


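// On-stack replacement entries are not implemented in this port yet, so
// optimized compilation is aborted for any function that contains one.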
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  Abort("DoOsrEntry unimplemented.");
}


#undef __

} } // namespace v8::internal