// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "arm/lithium-codegen-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


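// The gap resolver turns the parallel moves recorded in an LGap into a
// sequence of sequential moves. Each distinct operand becomes an LGapNode;
// the assigned_from() link records which node a node's value is copied
// from, so the moves form a graph in which cycles (e.g. a register swap)
// must be detected and broken using a scratch "marker" operand.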
class LGapNode: public ZoneObject {
 public:
  explicit LGapNode(LOperand* operand)
      : operand_(operand), resolved_(false), visited_id_(-1) { }

  LOperand* operand() const { return operand_; }
  bool IsResolved() const { return !IsAssigned() || resolved_; }
  void MarkResolved() {
    ASSERT(!IsResolved());
    resolved_ = true;
  }
  int visited_id() const { return visited_id_; }
  void set_visited_id(int id) {
    ASSERT(id > visited_id_);
    visited_id_ = id;
  }

  bool IsAssigned() const { return assigned_from_.is_set(); }
  LGapNode* assigned_from() const { return assigned_from_.get(); }
  void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }

 private:
  LOperand* operand_;
  SetOncePointer<LGapNode> assigned_from_;
  bool resolved_;
  int visited_id_;
};


LGapResolver::LGapResolver()
    : nodes_(32),
      identified_cycles_(4),
      result_(16),
      next_visited_id_(0) {
}


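// Resolution proceeds in three phases: every non-redundant move is first
// added to the dependency graph (constant sources go straight to the result
// list), each identified cycle is then broken using the marker operand, and
// finally the remaining moves are emitted once their source node is
// resolved. Note that DoParallelMove walks the returned list backwards, so
// moves appended first are executed last.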
const ZoneList<LMoveOperands>* LGapResolver::Resolve(
    const ZoneList<LMoveOperands>* moves,
    LOperand* marker_operand) {
  nodes_.Rewind(0);
  identified_cycles_.Rewind(0);
  result_.Rewind(0);
  next_visited_id_ = 0;

  for (int i = 0; i < moves->length(); ++i) {
    LMoveOperands move = moves->at(i);
    if (!move.IsRedundant()) RegisterMove(move);
  }

  for (int i = 0; i < identified_cycles_.length(); ++i) {
    ResolveCycle(identified_cycles_[i], marker_operand);
  }

  int unresolved_nodes;
  do {
    unresolved_nodes = 0;
    for (int j = 0; j < nodes_.length(); j++) {
      LGapNode* node = nodes_[j];
      if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
        AddResultMove(node->assigned_from(), node);
        node->MarkResolved();
      }
      if (!node->IsResolved()) ++unresolved_nodes;
    }
  } while (unresolved_nodes > 0);
  return &result_;
}


void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
  AddResultMove(from->operand(), to->operand());
}


void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
  result_.Add(LMoveOperands(from, to));
}


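// Breaks a cycle by routing it through the marker operand. For example, a
// swap r0 <-> r1 appends the moves [marker->r0, r0->r1, r1->marker] to the
// result list; executed in reverse by DoParallelMove this becomes
// "save r1 to scratch, r1 = r0, r0 = scratch".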
void LGapResolver::ResolveCycle(LGapNode* start, LOperand* marker_operand) {
  ZoneList<LOperand*> cycle_operands(8);
  cycle_operands.Add(marker_operand);
  LGapNode* cur = start;
  do {
    cur->MarkResolved();
    cycle_operands.Add(cur->operand());
    cur = cur->assigned_from();
  } while (cur != start);
  cycle_operands.Add(marker_operand);

  for (int i = cycle_operands.length() - 1; i > 0; --i) {
    LOperand* from = cycle_operands[i];
    LOperand* to = cycle_operands[i - 1];
    AddResultMove(from, to);
  }
}


bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
  ASSERT(a != b);
  LGapNode* cur = a;
  while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
    cur->set_visited_id(visited_id);
    cur = cur->assigned_from();
  }

  return cur == b;
}


bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
  ASSERT(a != b);
  return CanReach(a, b, next_visited_id_++);
}


void LGapResolver::RegisterMove(LMoveOperands move) {
  if (move.source()->IsConstantOperand()) {
    // Constant moves should be last in the machine code. Therefore add them
    // first to the result set.
    AddResultMove(move.source(), move.destination());
  } else {
    LGapNode* from = LookupNode(move.source());
    LGapNode* to = LookupNode(move.destination());
    if (to->IsAssigned() && to->assigned_from() == from) {
      move.Eliminate();
      return;
    }
    ASSERT(!to->IsAssigned());
    if (CanReach(from, to)) {
      // This introduces a cycle. Save.
      identified_cycles_.Add(from);
    }
    to->set_assigned_from(from);
  }
}


LGapNode* LGapResolver::LookupNode(LOperand* operand) {
  for (int i = 0; i < nodes_.length(); ++i) {
    if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
  }

  // No node found => create a new one.
  LGapNode* result = new LGapNode(operand);
  nodes_.Add(result);
  return result;
}


#define __ masm()->

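// Code generation proceeds in a fixed order: prologue, instruction body,
// deferred code, and finally the safepoint table. Aborting at any stage
// (status_ == ABORTED) makes the remaining stages report failure.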
bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


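// After the stm below the stack holds (from high to low addresses)
// lr, fp, cp, r1, and fp is adjusted to point at the saved fp, giving the
// standard JS frame layout expected by the runtime.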
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Force constant pool emission at the end of deferred code to make
  // sure that no constant pools are emitted after the official end of
  // the instruction sequence.
  masm()->CheckConstPool(true, false);

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


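// Frame-relative addressing: a non-negative index names a spill slot below
// the fixed part of the frame (fp, function, context), e.g. index 0 maps to
// fp - 3 * kPointerSize, while a negative index names an incoming parameter
// above the return address, e.g. index -1 maps to fp + 2 * kPointerSize.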
MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  // TODO(regis): Revisit.
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr);
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution could
  // resume from a previous bailout point and repeat the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------ translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


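// Emits a conditional jump to the pre-built eager deoptimization entry for
// this environment. With --trap-on-deopt a breakpoint is inserted on the
// deopt path so the bailout can be inspected in a debugger.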
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == al) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


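// Emits the machine code for one resolved parallel move. The resolver may
// route values through a marker operand; marker reads and writes are mapped
// onto the core scratch register (or d0 for doubles). Because the resolver
// appends moves so that the list must be executed back to front, the loop
// below iterates in reverse.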
void LCodeGen::DoParallelMove(LParallelMove* move) {
  // d0 must always be a scratch register.
  DoubleRegister dbl_scratch = d0;
  LUnallocated marker_operand(LUnallocated::NONE);

  Register core_scratch = scratch0();
  bool destroys_core_scratch = false;

  const ZoneList<LMoveOperands>* moves =
      resolver_.Resolve(move->move_operands(), &marker_operand);
  for (int i = moves->length() - 1; i >= 0; --i) {
    LMoveOperands move = moves->at(i);
    LOperand* from = move.source();
    LOperand* to = move.destination();
    ASSERT(!from->IsDoubleRegister() ||
           !ToDoubleRegister(from).is(dbl_scratch));
    ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(dbl_scratch));
    ASSERT(!from->IsRegister() || !ToRegister(from).is(core_scratch));
    ASSERT(!to->IsRegister() || !ToRegister(to).is(core_scratch));
    if (from == &marker_operand) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), core_scratch);
        ASSERT(destroys_core_scratch);
      } else if (to->IsStackSlot()) {
        __ str(core_scratch, ToMemOperand(to));
        ASSERT(destroys_core_scratch);
      } else if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), dbl_scratch);
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
      }
    } else if (to == &marker_operand) {
      if (from->IsRegister() || from->IsConstantOperand()) {
        __ mov(core_scratch, ToOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsStackSlot()) {
        __ ldr(core_scratch, ToMemOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsDoubleRegister()) {
        __ vmov(dbl_scratch, ToDoubleRegister(from));
      } else {
        ASSERT(from->IsDoubleStackSlot());
        // TODO(regis): Why is vldr not taking a MemOperand?
        // __ vldr(dbl_scratch, ToMemOperand(from));
        MemOperand from_operand = ToMemOperand(from);
        __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      }
    } else if (from->IsConstantOperand()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ mov(ip, ToOperand(from));
        __ str(ip, ToMemOperand(to));
      }
    } else if (from->IsRegister()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ str(ToRegister(from), ToMemOperand(to));
      }
    } else if (to->IsRegister()) {
      ASSERT(from->IsStackSlot());
      __ ldr(ToRegister(to), ToMemOperand(from));
    } else if (from->IsStackSlot()) {
      ASSERT(to->IsStackSlot());
      __ ldr(ip, ToMemOperand(from));
      __ str(ip, ToMemOperand(to));
    } else if (from->IsDoubleRegister()) {
      if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), ToDoubleRegister(from));
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(ToDoubleRegister(from), to_operand.rn(), to_operand.offset());
      }
    } else if (to->IsDoubleRegister()) {
      ASSERT(from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(ToDoubleRegister(to), ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(ToDoubleRegister(to), from_operand.rn(), from_operand.offset());
    } else {
      ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(dbl_scratch, ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      // TODO(regis): Why is vstr not taking a MemOperand?
      // __ vstr(dbl_scratch, ToMemOperand(to));
      MemOperand to_operand = ToMemOperand(to);
      __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
    }
  }

  if (destroys_core_scratch) {
    __ ldr(core_scratch, MemOperand(fp, -kPointerSize));
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      StringCharAtStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::MathPow: {
      Abort("MathPowStub unimplemented.");
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


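// Integer modulus. Fast paths handle a non-negative left operand that is
// already smaller than the right one (after up to kUnfolds subtractions)
// and a power-of-two right operand (bitwise and with right - 1); everything
// else is punted to the generic binary op stub on tagged Smis, deoptimizing
// when the operands or the stub result do not fit a Smi.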
void LCodeGen::DoModI(LModI* instr) {
  class DeferredModI: public LDeferredCode {
   public:
    DeferredModI(LCodeGen* codegen, LModI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredGenericBinaryStub(instr_, Token::MOD);
    }
   private:
    LModI* instr_;
  };
  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  Label deoptimize, done;
  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ tst(right, Operand(right));
    __ b(eq, &deoptimize);
  }

  // Check for (0 % -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label ok;
    __ tst(left, Operand(left));
    __ b(ne, &ok);
    __ tst(right, Operand(right));
    __ b(pl, &ok);
    __ b(al, &deoptimize);
    __ bind(&ok);
  }

  // Try a few common cases before using the generic stub.
  Label call_stub;
  const int kUnfolds = 3;
  // Skip if either side is negative.
  __ cmp(left, Operand(0));
  __ cmp(right, Operand(0), NegateCondition(mi));
  __ b(mi, &call_stub);
  // If the right hand side is smaller than the (nonnegative)
  // left hand side, it is the result. Else try a few subtractions
  // of the left hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less than or equal to the
    // right hand side.
    __ cmp(scratch, right);
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  // Check for power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZero(right, scratch, &call_stub);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  // The power-of-two fast case result is final; do not fall through
  // into the stub call.
  __ b(al, &done);

  __ bind(&call_stub);
  // Call the generic stub. The numbers in r0 and r1 have
  // to be tagged to Smis. If that is not possible, deoptimize.
  DeferredModI* deferred = new DeferredModI(this, instr);
  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);

  __ b(al, &done);
  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}


void LCodeGen::DoDivI(LDivI* instr) {
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredGenericBinaryStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ tst(right, right);
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ tst(left, Operand(left));
    __ b(ne, &left_not_zero);
    __ tst(right, Operand(right));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (kMinInt / -1), which overflows int32.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the generic stub. The numbers in r0 and r1 have
  // to be tagged to Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}


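// Slow path shared by DoModI and DoDivI: all registers (including VFP
// registers) are saved around the GenericBinaryOpStub call, and the stub's
// tagged result in r0 is written back into the saved-register slot so it
// survives the restore.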
template<int T>
void LCodeGen::DoDeferredGenericBinaryStub(LTemplateInstruction<1, 2, T>* instr,
                                           Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  __ PushSafepointRegistersAndDoubles();
  GenericBinaryOpStub stub(op, OVERWRITE_LEFT, left, right);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0);
  __ PopSafepointRegistersAndDoubles();
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->InputAt(0));
  Register right = EmitLoadRegister(instr->InputAt(1), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->InputAt(1)->IsConstantOperand()) {
    __ orr(ToRegister(instr->TempAt(0)), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(left, scratch, left, right);
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ tst(left, Operand(left));
    __ b(ne, &done);
    if (instr->InputAt(1)->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
        DeoptimizeIf(al, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Register right_reg = EmitLoadRegister(right, ip);
  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), Operand(right_reg));
      break;
    default:
      UNREACHABLE();
      break;
  }
}


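// Shifts operate on untagged int32 values. SHR is the one case that can
// deoptimize: a logical shift by zero can leave the sign bit set, which is
// not representable as a signed int32 result.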
void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(scratch, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          __ mov(result, Operand(result, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = EmitLoadRegister(instr->InputAt(1), ip);
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  __ sub(left, left, right, SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DwVfpRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ vmov(result, v);
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoPixelArrayLength(LPixelArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, PixelArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ b(ne, &done);
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ mvn(ToRegister(input), Operand(ToRegister(input)));
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  Register right_reg = EmitLoadRegister(right, ip);
  __ add(ToRegister(left), ToRegister(left), Operand(right_reg), SetCC);

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      __ PrepareCallCFunction(4, scratch0());
      __ vmov(r0, r1, left);
      __ vmov(r2, r3, right);
      __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);
      // Move the result into the double result register.
      __ vmov(ToDoubleRegister(instr->result()), r0, r1);

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // TODO(regis): Implement TypeRecordingBinaryOpStub and replace current
  // GenericBinaryOpStub:
  // TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, r1, r0);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}


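// Branch on the truthiness of the input. Integers branch on non-zero,
// doubles treat zero and NaN as false, and tagged values fall back to the
// full ECMAScript ToBoolean semantics, calling the ToBoolean stub for
// anything that is not undefined, true, false, 0, a Smi, or a heap number.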
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    // Neither flag set (eq) means the value is non-zero and ordered, i.e.
    // true; zero or NaN sets a flag and goes to the false block.
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, ne);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  __ PushSafepointRegisters();
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  __ PopSafepointRegisters();
}


1568void LCodeGen::DoGoto(LGoto* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001569 class DeferredStackCheck: public LDeferredCode {
1570 public:
1571 DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1572 : LDeferredCode(codegen), instr_(instr) { }
1573 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1574 private:
1575 LGoto* instr_;
1576 };
1577
1578 DeferredStackCheck* deferred = NULL;
1579 if (instr->include_stack_check()) {
1580 deferred = new DeferredStackCheck(this, instr);
1581 }
1582 EmitGoto(instr->block_id(), deferred);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001583}
1584
1585
1586Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001587 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001588 switch (op) {
1589 case Token::EQ:
1590 case Token::EQ_STRICT:
1591 cond = eq;
1592 break;
1593 case Token::LT:
1594 cond = is_unsigned ? lo : lt;
1595 break;
1596 case Token::GT:
1597 cond = is_unsigned ? hi : gt;
1598 break;
1599 case Token::LTE:
1600 cond = is_unsigned ? ls : le;
1601 break;
1602 case Token::GTE:
1603 cond = is_unsigned ? hs : ge;
1604 break;
1605 case Token::IN:
1606 case Token::INSTANCEOF:
1607 default:
1608 UNREACHABLE();
1609 }
1610 return cond;
1611}
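
// The signed/unsigned split above follows the ARM condition codes:
// lt/gt/le/ge test the signed flags, while lo/hi/ls/hs test carry and zero
// and implement the unsigned orderings. Callers pass is_unsigned == true
// not only for unsigned integers but also for doubles (see DoCmpID below):
// after VFPCompareAndSetFlags the carry-based conditions give the correct
// ordered results, provided the unordered (NaN) case has already been
// dispatched on the V flag.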


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  Register scratch = scratch0();

  Label unordered, done;
  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to unordered to return false.
    __ b(vs, &unordered);
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ b(cc, &done);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to the false block label.
    __ b(vs, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}
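
// An "undetectable" object is one whose map has the kIsUndetectable bit
// set; the canonical example is document.all in browser embeddings. Such
// objects compare equal to null and undefined under ==, which is why the
// non-strict path above consults the map's bit field once the value is
// known not to be null, undefined, or a smi.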


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}


Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  __ LoadRoot(temp1, Heap::kNullValueRootIndex);
  __ cmp(input, temp1);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_JS_OBJECT_TYPE));
  return le;
}
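
// EmitIsObject is shared by the value-producing and the branching forms of
// the test. It branches out directly for the cases it can decide early
// (smi, null, undetectable, type below the object range) and returns the
// condition of the final comparison (le against LAST_JS_OBJECT_TYPE) still
// pending in the flags, so each caller can consume it however it likes.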


void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = scratch0();
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ b(true_cond, &is_true);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Register result = ToRegister(instr->result());
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  Label done;
  __ b(eq, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}


static InstanceType TestType(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}
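
// TestType and BranchCondition together turn the instance type interval
// [from, to] of an HHasInstanceType into one compare plus one condition:
//   from == to          -> compare with from, branch on eq
//   to   == LAST_TYPE   -> compare with from, branch on hs (type >= from)
//   from == FIRST_TYPE  -> compare with to,   branch on ls (type <= to)
// Any other interval would need two comparisons; the UNREACHABLE() in
// BranchCondition documents that the hydrogen instruction never produces
// one.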


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Label done;
  __ tst(input, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kFalseValueRootIndex, eq);
  __ b(eq, &done);
  __ CompareObjectType(input, result, result, TestType(instr->hydrogen()));
  Condition cond = BranchCondition(instr->hydrogen());
  __ LoadRoot(result, Heap::kTrueValueRootIndex, cond);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, NegateCondition(cond));
  __ bind(&done);
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, false_label);

  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Abort("DoHasCachedArrayIndex unimplemented.");
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Abort("DoHasCachedArrayIndexAndBranch unimplemented.");
}


// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, is_false);
  __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ b(eq, is_true);
  } else {
    __ b(eq, is_false);
  }

  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}


void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Handle<String> class_name = instr->hydrogen()->class_name();

  Label done, is_true, is_false;

  EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
  __ b(ne, &is_false);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  // The stub returns 0 (eq) when the object is an instance.
  __ tst(r0, r0);
  __ mov(r0, Operand(Factory::false_value()), LeaveCC, ne);
  __ mov(r0, Operand(Factory::true_value()), LeaveCC, eq);
}


void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ tst(r0, Operand(r0));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  __ mov(ip, Operand(Factory::the_hole_value()));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(Factory::the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check for null and string
  // before calling the deferred code.
  __ bind(&cache_miss);
  // Null is not an instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values are not instances of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result holds either true or false. The deferred code also produces
  // a true or false object.
  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  __ PushSafepointRegisters();

  // Get the temp register reserved by the instruction. This needs to be r4
  // because its slot in the pushed safepoint register area is used to
  // communicate the offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp);
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  ASSERT_EQ(kAdditionalDelta,
            masm_->InstructionsGeneratedSince(&before_push_delta));
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result);

  __ PopSafepointRegisters();
}
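
// The value written into r4's safepoint slot above is the byte distance
// (delta instructions of kPointerSize bytes each) from the map check to
// the stub call's return address; the stub uses it to locate and patch the
// two hole constants at the inlined call site. BlockConstPoolFor is what
// makes the ASSERT_EQ hold: no constant pool may be emitted between
// before_push_delta and the call, so exactly kAdditionalDelta instructions
// separate them and the precomputed delta stays accurate.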


static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}


void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  // The compare stub expects the condition and the input operands
  // to be reversed for GT and LTE.
  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, condition);
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}
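
// Teardown arithmetic above: "mov sp, fp" discards the locals, the ldm
// pops the caller's fp and the return address, and sp_delta then removes
// the receiver plus the ParameterCount() parameters that the caller pushed
// above the return address. For example, with two parameters the final add
// frees 3 * kPointerSize bytes.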


void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result,
         MemOperand(context, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  __ ldr(result, ContextOperand(result, instr->slot_index()));
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  __ ldr(context,
         MemOperand(context, Context::SlotOffset(Context::FCONTEXT_INDEX)));
  __ str(value, ContextOperand(context, instr->slot_index()));
  if (instr->needs_write_barrier()) {
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWrite(context, Operand(offset), value, scratch0());
  }
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kPixelArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ Check(eq, "Check for fast elements failed.");
    __ bind(&done);
  }
}


void LCodeGen::DoLoadPixelArrayExternalPointer(
    LLoadPixelArrayExternalPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ ldr(to_reg,
         FieldMemOperand(from_reg, PixelArray::kExternalPointerOffset));
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bail out if the index is not a valid argument index. The unsigned
  // comparison catches negative indices for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting the index from the length accounts for one of them; add
  // one more.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
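
// Address arithmetic for the load above: after length -= index and the
// extra increment, the slot read is
//   arguments + (length - index + 1) * kPointerSize,
// so index == length - 1 (the last argument) reads arguments + 2 words,
// skipping exactly the saved frame pointer and return address that sit
// between the frame pointer and the last argument.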


void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  ASSERT(result.is(elements));

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ cmp(result, scratch);
  DeoptimizeIf(eq, instr->environment());
}
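
// The hole marks an element that was never written (a gap in a fast
// backing store). Correct semantics for a holey read may involve the
// prototype chain, so instead of materializing undefined the optimized
// code deoptimizes and leaves that case to the generic path.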


void LCodeGen::DoLoadPixelArrayElement(LLoadPixelArrayElement* instr) {
  Register external_elements = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  Register result = ToRegister(instr->result());

  // Load the result.
  __ ldrb(result, MemOperand(external_elements, key));
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ mov(result, fp, LeaveCC, ne);
  __ mov(result, scratch, LeaveCC, eq);
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If there is no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}


void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  Label global_object, receiver_ok;
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object.
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
  DeoptimizeIf(lo, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ tst(length, Operand(length));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
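
// The code above is the optimized equivalent of f.apply(receiver, args)
// for an arguments object: a null or undefined receiver is replaced by the
// global object, argument lists longer than kArgumentsLimit deoptimize
// rather than risk overflowing the stack, and the arguments are copied out
// of the (possibly adapted) caller frame onto the stack before the call,
// with r0 carrying the actual argument count into InvokeFunction.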


void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort("DoPushArgument not implemented for double type.");
  } else {
    Register argument_reg = EmitLoadRegister(argument, ip);
    __ push(argument_reg);
  }
}


void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, cp);
}


void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result,
         MemOperand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ ldr(result, FieldMemOperand(result, JSFunction::kContextOffset));
}


void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}


void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}


void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr) {
  // Change context if needed.
  bool change_context =
      (graph()->info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to the number of arguments if adaption is not needed. Assumes
  // that r0 is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Set up deoptimization.
  RegisterLazyDeoptimization(instr);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->function()));
  CallKnownFunction(instr->function(), instr->arity(), instr);
}


void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| would be restored
  // unchanged by popping safepoint registers.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  __ PushSafepointRegisters();

  // Registers were saved at the safepoint, so we can use
  // many scratch registers.
  Register tmp1 = input.is(r1) ? r0 : r1;
  Register tmp2 = input.is(r2) ? r0 : r2;
  Register tmp3 = input.is(r3) ? r0 : r3;
  Register tmp4 = input.is(r4) ? r0 : r4;

  // exponent: floating point exponent value.

  Label allocated, slow;
  __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
  __ b(&allocated);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  // Set the pointer to the new heap number in tmp1.
  if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
  // Restore input_reg after the call to the runtime.
  __ LoadFromSafepointRegisterSlot(input);
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

  __ bind(&allocated);
  // exponent: floating point exponent value.
  // tmp1: allocated heap number.
  __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
  __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
  __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
  __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

  __ str(tmp1, masm()->SafepointRegisterSlot(input));
  __ PopSafepointRegisters();

  __ bind(&done);
}
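
// Heap numbers are immutable, so the deferred abs above cannot simply
// clear the sign bit of its input in place. Instead it allocates a fresh
// HeapNumber (falling back to the runtime when inline allocation fails)
// and copies the payload across with the sign bit, kSignMask in the
// exponent word, cleared by the bic instruction.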


void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ cmp(input, Operand(0));
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(input, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
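
// Worked example for the deoptimization above: the only int32 whose
// absolute value is not representable is kMinInt (-2^31). For that input
// rsb computes 0 - 0x80000000, which wraps back to 0x80000000 and sets the
// V flag, so kMinInt is the single value that triggers the deopt.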


void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    __ vabs(input, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}


// Truncates a double using a specific rounding mode.
// Clears the z flag (ne condition) if an overflow occurs.
void LCodeGen::EmitVFPTruncate(VFPRoundingMode rounding_mode,
                               SwVfpRegister result,
                               DwVfpRegister double_input,
                               Register scratch1,
                               Register scratch2) {
  Register prev_fpscr = scratch1;
  Register scratch = scratch2;

  // Set custom FPSCR:
  //  - Set rounding mode.
  //  - Clear VFP cumulative exception flags.
  //  - Make sure the Flush-to-zero mode control bit is unset.
  __ vmrs(prev_fpscr);
  __ bic(scratch, prev_fpscr, Operand(kVFPExceptionMask |
                                      kVFPRoundingModeMask |
                                      kVFPFlushToZeroMask));
  __ orr(scratch, scratch, Operand(rounding_mode));
  __ vmsr(scratch);

  // Convert the argument to an integer.
  __ vcvt_s32_f64(result,
                  double_input,
                  kFPSCRRounding);

  // Retrieve FPSCR.
  __ vmrs(scratch);
  // Restore FPSCR.
  __ vmsr(prev_fpscr);
  // Check for VFP exceptions.
  __ tst(scratch, Operand(kVFPExceptionMask));
}
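
// The FPSCR dance above exists so that exceptions can be attributed to
// this one conversion: the cumulative exception flags are cleared going
// in, so any bit matching kVFPExceptionMask afterwards (typically the
// invalid-operation flag raised for NaN or out-of-range inputs) must have
// come from the vcvt itself. The caller's FPSCR, including its rounding
// mode, is restored before the flags are tested.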


void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  EmitVFPTruncate(kRoundToMinusInf,
                  single_scratch,
                  input,
                  scratch1,
                  scratch2);
  DeoptimizeIf(ne, instr->environment());

  // Move the result back to a general purpose register.
  __ vmov(result, single_scratch);

  // Test for -0.
  Label done;
  __ cmp(result, Operand(0));
  __ b(ne, &done);
  __ vmov(scratch1, input.high());
  __ tst(scratch1, Operand(HeapNumber::kSignMask));
  DeoptimizeIf(ne, instr->environment());
  __ bind(&done);
}
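
// A zero integer result above is ambiguous: result == 0 arises both from
// inputs in [0.0, 1.0) and from -0.0, since every other negative input
// rounds toward minus infinity to -1 or lower. An integer register cannot
// represent -0, so the code checks the sign bit of the input's high word
// and deoptimizes when it is set.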
2816
2817
2818void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002819 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002820 ASSERT(ToDoubleRegister(instr->result()).is(input));
2821 __ vsqrt(input, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002822}
2823
2824
2825void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2826 switch (instr->op()) {
2827 case kMathAbs:
2828 DoMathAbs(instr);
2829 break;
2830 case kMathFloor:
2831 DoMathFloor(instr);
2832 break;
2833 case kMathSqrt:
2834 DoMathSqrt(instr);
2835 break;
2836 default:
2837 Abort("Unimplemented type of LUnaryMathOperation.");
2838 UNREACHABLE();
2839 }
2840}
2841
2842
2843void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002844 ASSERT(ToRegister(instr->result()).is(r0));
2845
2846 int arity = instr->arity();
2847 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2848 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2849 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002850}
2851
2852
2853void LCodeGen::DoCallNamed(LCallNamed* instr) {
2854 ASSERT(ToRegister(instr->result()).is(r0));
2855
2856 int arity = instr->arity();
2857 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2858 __ mov(r2, Operand(instr->name()));
2859 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2860 // Restore context register.
2861 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2862}
2863
2864
2865void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002866 ASSERT(ToRegister(instr->result()).is(r0));
2867
2868 int arity = instr->arity();
2869 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2870 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2871 __ Drop(1);
2872 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002873}
2874
2875
2876void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002877 ASSERT(ToRegister(instr->result()).is(r0));
2878
2879 int arity = instr->arity();
2880 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2881 __ mov(r2, Operand(instr->name()));
2882 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2883 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002884}
2885
2886
2887void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2888 ASSERT(ToRegister(instr->result()).is(r0));
2889 __ mov(r1, Operand(instr->target()));
2890 CallKnownFunction(instr->target(), instr->arity(), instr);
2891}
2892
2893
2894void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002895 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002896 ASSERT(ToRegister(instr->result()).is(r0));
2897
2898 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2899 __ mov(r0, Operand(instr->arity()));
2900 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2901}
2902
2903
2904void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2905 CallRuntime(instr->function(), instr->arity(), instr);
2906}
2907
2908
2909void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002910 Register object = ToRegister(instr->object());
2911 Register value = ToRegister(instr->value());
2912 Register scratch = scratch0();
2913 int offset = instr->offset();
2914
2915 ASSERT(!object.is(value));
2916
2917 if (!instr->transition().is_null()) {
2918 __ mov(scratch, Operand(instr->transition()));
2919 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}


void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic(Builtins::builtin(info_->is_strict()
                                        ? Builtins::StoreIC_Initialize_Strict
                                        : Builtins::StoreIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
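  // The unsigned "hs" condition below makes this a single-compare bounds
  // check: a negative index, viewed as an unsigned value, compares higher
  // than any valid length and deoptimizes on the same branch.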
  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
  DeoptimizeIf(hs, instr->environment());
}


void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
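    // For example (assuming a 32-bit target where kPointerSize is 4), a
    // constant key of 2 yields offset 2 * 4 + FixedArray::kHeaderSize = 16;
    // FieldMemOperand then subtracts the heap object tag from the address.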
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}


void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r2));
  ASSERT(ToRegister(instr->key()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register scratch = scratch0();
  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  Label flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(eq, &flat_string);
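  // In the instance type byte, the low bits hold the string representation
  // (sequential strings have tag 0, hence the tst/eq test above), while a
  // separate encoding bit distinguishes ASCII from two-byte strings below.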

  // Handle non-flat strings.
  __ tst(result, Operand(kIsConsStringMask));
  __ b(eq, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, deferred->entry());

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ ldrh(result,
            FieldMemOperand(string,
                            SeqTwoByteString::kHeaderSize + 2 * const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
    __ ldrh(result, MemOperand(scratch, index, LSL, 1));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ ldrb(result, FieldMemOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
    __ ldrb(result, MemOperand(scratch, index));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  __ PushSafepointRegisters();
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  __ SmiUntag(r0);
  MemOperand result_stack_slot = masm()->SafepointRegisterSlot(result);
  __ str(r0, result_stack_slot);
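  // The untagged result is written into the stack slot where the result
  // register was saved by PushSafepointRegisters, so the following pop
  // restores it into the right register.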
  __ PopSafepointRegisters();
}


void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
}


void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  ASSERT(output->IsDoubleRegister());
  SwVfpRegister single_scratch = double_scratch0().low();
  if (input->IsStackSlot()) {
    Register scratch = scratch0();
    __ ldr(scratch, ToMemOperand(input));
    __ vmov(single_scratch, scratch);
  } else {
    __ vmov(single_scratch, ToRegister(input));
  }
  __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
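  // Either way the integer bits end up in a single-precision scratch
  // register, and vcvt_f64_s32 interprets them as a signed 32-bit value and
  // widens to the double result.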
}


void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
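  // SmiTag with SetCC is effectively "adds reg, reg, reg": it doubles the
  // value and sets the overflow flag when the result no longer fits in a
  // signed 32-bit word, i.e. when the input needs more than 31 bits.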
  __ SmiTag(reg, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  DoubleRegister dbl_scratch = d0;
  SwVfpRegister flt_scratch = s0;

  // Preserve the value of all registers.
  __ PushSafepointRegisters();

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  __ SmiUntag(reg);
  __ eor(reg, reg, Operand(0x80000000));
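  // Worked example: for input 0x40000000 (2^30), SmiTag produced 0x80000000
  // with the overflow flag set. SmiUntag (arithmetic shift right by one)
  // gives 0xC0000000, and flipping the sign bit recovers 0x40000000.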
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the
  // result register is stored, as this register is in the pointer map, but
  // contains an integer value.
  __ mov(ip, Operand(0));
  int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
  __ str(ip, MemOperand(sp, reg_stack_index * kPointerSize));

  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  __ str(reg, MemOperand(sp, reg_stack_index * kPointerSize));
  __ PopSafepointRegisters();
}


void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
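  // vstr works on an untagged base address, so the heap object tag is
  // subtracted first; the double is then stored at the value offset of the
  // freshly allocated HeapNumber.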
3258}
3259
3260
3261void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3262 // TODO(3095996): Get rid of this. For now, we need to make the
3263 // result register contain a valid pointer because it is already
3264 // contained in the register pointer map.
3265 Register reg = ToRegister(instr->result());
3266 __ mov(reg, Operand(0));
3267
3268 __ PushSafepointRegisters();
3269 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3270 RecordSafepointWithRegisters(
3271 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3272 int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
3273 __ str(r0, MemOperand(sp, reg_stack_index * kPointerSize));
3274 __ PopSafepointRegisters();
3275}
3276
3277
3278void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003279 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003280 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3281 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3282 __ SmiTag(ToRegister(input));
3283}
3284
3285
3286void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003287 LOperand* input = instr->InputAt(0);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003288 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3289 if (instr->needs_check()) {
3290 __ tst(ToRegister(input), Operand(kSmiTagMask));
3291 DeoptimizeIf(ne, instr->environment());
3292 }
3293 __ SmiUntag(ToRegister(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003294}
3295
3296
3297void LCodeGen::EmitNumberUntagD(Register input_reg,
3298 DoubleRegister result_reg,
3299 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01003300 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003301 SwVfpRegister flt_scratch = s0;
3302 ASSERT(!result_reg.is(d0));
3303
3304 Label load_smi, heap_number, done;
3305
3306 // Smi check.
3307 __ tst(input_reg, Operand(kSmiTagMask));
3308 __ b(eq, &load_smi);
3309
3310 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01003311 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003312 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01003313 __ cmp(scratch, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003314 __ b(eq, &heap_number);
3315
3316 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3317 __ cmp(input_reg, Operand(ip));
3318 DeoptimizeIf(ne, env);
3319
3320 // Convert undefined to NaN.
3321 __ LoadRoot(ip, Heap::kNanValueRootIndex);
3322 __ sub(ip, ip, Operand(kHeapObjectTag));
3323 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3324 __ jmp(&done);
3325
3326 // Heap number to double register conversion.
3327 __ bind(&heap_number);
3328 __ sub(ip, input_reg, Operand(kHeapObjectTag));
3329 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3330 __ jmp(&done);
3331
3332 // Smi to double register conversion
3333 __ bind(&load_smi);
3334 __ SmiUntag(input_reg); // Untag smi before converting to float.
3335 __ vmov(flt_scratch, input_reg);
3336 __ vcvt_f64_s32(result_reg, flt_scratch);
3337 __ SmiTag(input_reg); // Retag smi.
3338 __ bind(&done);
3339}
3340
3341
3342class DeferredTaggedToI: public LDeferredCode {
3343 public:
3344 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3345 : LDeferredCode(codegen), instr_(instr) { }
3346 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3347 private:
3348 LTaggedToI* instr_;
3349};
3350
3351
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Label done;
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();
  DoubleRegister dbl_scratch = d0;
  SwVfpRegister flt_scratch = s0;
  DoubleRegister dbl_tmp = ToDoubleRegister(instr->TempAt(0));

  // Heap number map check.
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));

  if (instr->truncating()) {
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
    __ vcmp(dbl_tmp, 0.0);  // Sets overflow bit in FPSCR flags if NaN.
    __ vcvt_s32_f64(flt_scratch, dbl_tmp);
    __ vmov(input_reg, flt_scratch);  // 32-bit result of conversion.
    __ vmrs(pc);  // Move vector status bits to normal status bits.
    // Overflow bit is set if dbl_tmp is NaN.
    __ cmn(input_reg, Operand(1), vc);  // 0x7fffffff + 1 -> overflow.
    __ cmp(input_reg, Operand(1), vc);  // 0x80000000 - 1 -> overflow.
    DeoptimizeIf(vs, instr->environment());  // Saturation may have occurred.
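    // vcvt_s32_f64 saturates out-of-range doubles to 0x80000000 or
    // 0x7fffffff. The two conditional compares above set the overflow flag
    // exactly for those values, and are skipped entirely (leaving the flag
    // set) when the vcmp already flagged NaN, so a single "vs" deopt covers
    // NaN and both saturation directions.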

  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
    __ vcvt_s32_f64(flt_scratch, dbl_tmp);
    __ vmov(input_reg, flt_scratch);  // 32-bit result of conversion.
    // Non-truncating conversion means that we cannot lose bits, so we convert
    // back to check; note that using non-overlapping s and d regs would be
    // slightly faster.
    __ vcvt_f64_s32(dbl_scratch, flt_scratch);
    __ VFPCompareAndSetFlags(dbl_scratch, dbl_tmp);
    DeoptimizeIf(ne, instr->environment());  // Not equal or unordered.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ tst(input_reg, Operand(input_reg));
      __ b(ne, &done);
      __ vmov(lr, ip, dbl_tmp);
      __ tst(ip, Operand(1 << 31));  // Test sign bit.
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ tst(input_reg, Operand(kSmiTagMask));
  __ b(ne, deferred->entry());

  // Smi to int32 conversion.
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  DoubleRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg, instr->environment());
}


void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsDoubleRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsRegister());

  DoubleRegister double_input = ToDoubleRegister(input);
  Register result_reg = ToRegister(result);
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  VFPRoundingMode rounding_mode = instr->truncating() ? kRoundToMinusInf
                                                      : kRoundToNearest;

  EmitVFPTruncate(rounding_mode,
                  single_scratch,
                  double_input,
                  scratch1,
                  scratch2);
  // Deoptimize if we had a vfp invalid exception.
  DeoptimizeIf(ne, instr->environment());
  // Retrieve the result.
  __ vmov(result_reg, single_scratch);

  if (instr->truncating() &&
      instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label done;
    __ cmp(result_reg, Operand(0));
    __ b(ne, &done);
    // Check for -0.
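    // A zero result is ambiguous here: both +0.0 and -0.0 convert to the
    // integer 0, so the sign bit in the high word of the double is used to
    // tell them apart.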
    __ vmov(scratch1, double_input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());

    __ bind(&done);
  }
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(instr->condition(), instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();
  InstanceType first = instr->hydrogen()->first();
  InstanceType last = instr->hydrogen()->last();

  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(first));

  // If there is only one type in the interval, check for equality.
  if (first == last) {
    DeoptimizeIf(ne, instr->environment());
  } else {
    DeoptimizeIf(lo, instr->environment());
    // Omit check for the last type.
    if (last != LAST_TYPE) {
      __ cmp(scratch, Operand(last));
      DeoptimizeIf(hi, instr->environment());
    }
  }
}


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->InputAt(0)->IsRegister());
  Register reg = ToRegister(instr->InputAt(0));
  __ cmp(reg, Operand(instr->hydrogen()->target()));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::LoadHeapObject(Register result,
                              Handle<HeapObject> object) {
  if (Heap::InNewSpace(*object)) {
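    // A new-space object can move on every scavenge, so its address must not
    // be embedded in code directly. Instead it is wrapped in a global
    // property cell (allocated in old space) and loaded from there.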
    Handle<JSGlobalPropertyCell> cell =
        Factory::NewJSGlobalPropertyCell(object);
    __ mov(result, Operand(cell));
    __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(result, Operand(object));
  }
}


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (shared_info->num_literals() == 0 && !pretenure) {
    FastNewClosureStub stub;
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                           ? Factory::true_value()
                           : Factory::false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  Condition final_branch_condition = EmitTypeofIs(&true_label,
                                                  &false_label,
                                                  input,
                                                  instr->type_literal());
  __ b(final_branch_condition, &true_label);
  __ bind(&false_label);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}


Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(Heap::number_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::string_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    __ CompareInstanceType(input, scratch, FIRST_NONSTRING_TYPE);
    final_branch_condition = lo;

  } else if (type_name->Equals(Heap::boolean_symbol())) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(input, ip);
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::undefined_symbol())) {
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(Heap::function_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ CompareObjectType(input, input, scratch, JS_FUNCTION_TYPE);
    __ b(eq, true_label);
    // Regular expressions => 'function' (they are callable).
    __ CompareInstanceType(input, scratch, JS_REGEXP_TYPE);
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::object_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ LoadRoot(ip, Heap::kNullValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    // Regular expressions => 'function', not 'object'.
    __ CompareObjectType(input, input, scratch, JS_REGEXP_TYPE);
    __ b(eq, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    // Check for JS objects => true.
    __ CompareInstanceType(input, scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, scratch, LAST_JS_OBJECT_TYPE);
    final_branch_condition = ls;

  } else {
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}


void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  EmitIsConstructCall(result, scratch0());
  __ b(eq, &true_label);

  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp1, scratch0());
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
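  // Callers test the resulting "eq" condition: the frame marker equals the
  // smi-encoded StackFrame::CONSTRUCT exactly when the current function was
  // invoked via 'new'.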
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}


void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(object, key);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::DELETE, CALL_JS, &safepoint_generator);
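  // Passing the safepoint generator to InvokeBuiltin lets the safepoint
  // (with this instruction's pointer map and deoptimization index) be
  // recorded at the actual call site inside the invoke sequence.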
}


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}


#undef __

} }  // namespace v8::internal