// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "arm/lithium-codegen-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

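// Records a safepoint, with the given pointer map and deoptimization
// index, at the point where it is generated (right after a call).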
class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_start(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  // TODO(regis): Revisit.
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
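    // With kPointerSize == 4, slot 0 thus lives at MemOperand(fp, -12).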
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  if (instr != NULL) {
    LPointerMap* pointers = instr->pointer_map();
    RecordPosition(pointers->position());
    __ Call(code, mode);
    RegisterLazyDeoptimization(instr);
  } else {
    LPointerMap no_pointers(0);
    RecordPosition(no_pointers.position());
    __ Call(code, mode);
    RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  // Runtime calls to Throw are not supposed to ever return at the
  // call site, so don't register lazy deoptimization for these. We do
  // however have to record a safepoint since throwing exceptions can
  // cause garbage collections.
  if (!instr->IsThrow()) {
    RegisterLazyDeoptimization(instr);
  } else {
    RecordSafepoint(instr->pointer_map(), Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution could
  // resume from a bailout point before the call and repeat the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4 0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------ translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    environment->WriteTranslation(this, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
                                                    deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    }
  }
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint =
      safepoints_.DefineSafepointWithRegisters(
          masm(), arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister()) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  // Register cp always contains a pointer to the context.
  safepoint.DefinePointerRegister(cp);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  // d0 must always be a scratch register.
  DoubleRegister dbl_scratch = d0;
  LUnallocated marker_operand(LUnallocated::NONE);
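  // Moves whose source or destination is marker_operand are the gap
  // resolver's way of requesting a temporary: a move to the marker saves a
  // value into the core or double scratch register, a move from it restores.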

  Register core_scratch = r9;
  bool destroys_core_scratch = false;

  LGapResolver resolver(move->move_operands(), &marker_operand);
  const ZoneList<LMoveOperands>* moves = resolver.ResolveInReverseOrder();
  for (int i = moves->length() - 1; i >= 0; --i) {
    LMoveOperands move = moves->at(i);
    LOperand* from = move.from();
    LOperand* to = move.to();
    ASSERT(!from->IsDoubleRegister() ||
           !ToDoubleRegister(from).is(dbl_scratch));
    ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(dbl_scratch));
    ASSERT(!from->IsRegister() || !ToRegister(from).is(core_scratch));
    ASSERT(!to->IsRegister() || !ToRegister(to).is(core_scratch));
    if (from == &marker_operand) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), core_scratch);
        ASSERT(destroys_core_scratch);
      } else if (to->IsStackSlot()) {
        __ str(core_scratch, ToMemOperand(to));
        ASSERT(destroys_core_scratch);
      } else if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), dbl_scratch);
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
      }
    } else if (to == &marker_operand) {
      if (from->IsRegister() || from->IsConstantOperand()) {
        __ mov(core_scratch, ToOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsStackSlot()) {
        __ ldr(core_scratch, ToMemOperand(from));
        destroys_core_scratch = true;
      } else if (from->IsDoubleRegister()) {
        __ vmov(dbl_scratch, ToDoubleRegister(from));
      } else {
        ASSERT(from->IsDoubleStackSlot());
        // TODO(regis): Why is vldr not taking a MemOperand?
        // __ vldr(dbl_scratch, ToMemOperand(from));
        MemOperand from_operand = ToMemOperand(from);
        __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      }
    } else if (from->IsConstantOperand()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ mov(ip, ToOperand(from));
        __ str(ip, ToMemOperand(to));
      }
    } else if (from->IsRegister()) {
      if (to->IsRegister()) {
        __ mov(ToRegister(to), ToOperand(from));
      } else {
        ASSERT(to->IsStackSlot());
        __ str(ToRegister(from), ToMemOperand(to));
      }
    } else if (to->IsRegister()) {
      ASSERT(from->IsStackSlot());
      __ ldr(ToRegister(to), ToMemOperand(from));
    } else if (from->IsStackSlot()) {
      ASSERT(to->IsStackSlot());
      __ ldr(ip, ToMemOperand(from));
      __ str(ip, ToMemOperand(to));
    } else if (from->IsDoubleRegister()) {
      if (to->IsDoubleRegister()) {
        __ vmov(ToDoubleRegister(to), ToDoubleRegister(from));
      } else {
        ASSERT(to->IsDoubleStackSlot());
        // TODO(regis): Why is vstr not taking a MemOperand?
        // __ vstr(dbl_scratch, ToMemOperand(to));
        MemOperand to_operand = ToMemOperand(to);
        __ vstr(ToDoubleRegister(from), to_operand.rn(), to_operand.offset());
      }
    } else if (to->IsDoubleRegister()) {
      ASSERT(from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(ToDoubleRegister(to), ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(ToDoubleRegister(to), from_operand.rn(), from_operand.offset());
    } else {
      ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot());
      // TODO(regis): Why is vldr not taking a MemOperand?
      // __ vldr(dbl_scratch, ToMemOperand(from));
      MemOperand from_operand = ToMemOperand(from);
      __ vldr(dbl_scratch, from_operand.rn(), from_operand.offset());
      // TODO(regis): Why is vstr not taking a MemOperand?
      // __ vstr(dbl_scratch, ToMemOperand(to));
      MemOperand to_operand = ToMemOperand(to);
      __ vstr(dbl_scratch, to_operand.rn(), to_operand.offset());
    }
  }

  if (destroys_core_scratch) {
    __ ldr(core_scratch, MemOperand(fp, -kPointerSize));
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  Abort("DoCallStub unimplemented.");
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  Abort("DoModI unimplemented.");
}


void LCodeGen::DoDivI(LDivI* instr) {
  Abort("DoDivI unimplemented.");
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->left());
  Register scratch = r9;
  Register right = EmitLoadRegister(instr->right(), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->right()->IsConstantOperand()) {
    __ orr(ToRegister(instr->temp()), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
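    // The 64-bit product fits in 32 bits iff its high word (scratch) equals
    // the sign extension of its low word (left ASR 31), checked below.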
    // scratch:left = left * right.
    __ smull(scratch, left, left, right);
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ tst(left, Operand(left));
    __ b(ne, &done);
    if (instr->right()->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->right())) < 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->temp()), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Register right_reg = EmitLoadRegister(right, ip);
  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), Operand(right_reg));
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), Operand(right_reg));
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(r9, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, r9));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          __ mov(result, Operand(result, LSR, r9), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, r9));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, r9));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
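        // A zero shift count leaves the value unchanged, so a set sign bit
        // would make the unsigned result unrepresentable as an int32.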
        if (shift_count == 0 && instr->can_deopt()) {
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  Register left = ToRegister(instr->left());
  Register right = EmitLoadRegister(instr->right(), ip);
  ASSERT(instr->left()->Equals(instr->result()));
  __ sub(left, left, right, SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  Abort("DoConstantD unimplemented.");
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoArrayLength(LArrayLength* instr) {
  Register result = ToRegister(instr->result());

  if (instr->hydrogen()->value()->IsLoadElements()) {
    // We load the length directly from the elements array.
    Register elements = ToRegister(instr->input());
    __ ldr(result, FieldMemOperand(elements, FixedArray::kLengthOffset));
  } else {
    // Check that the receiver really is an array.
    Register array = ToRegister(instr->input());
    Register temporary = ToRegister(instr->temporary());
    __ CompareObjectType(array, temporary, temporary, JS_ARRAY_TYPE);
    DeoptimizeIf(ne, instr->environment());

    // Load length directly from the array.
    __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
  }
  Abort("DoArrayLength untested.");
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Abort("DoValueOf unimplemented.");
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->input();
  ASSERT(input->Equals(instr->result()));
  __ mvn(ToRegister(input), Operand(ToRegister(input)));
  Abort("DoBitNotI untested.");
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  ASSERT(left->Equals(instr->result()));

  Register right_reg = EmitLoadRegister(right, ip);
  __ add(ToRegister(left), ToRegister(left), Operand(right_reg), SetCC);

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->left());
  DoubleRegister right = ToDoubleRegister(instr->right());
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      Abort("DoArithmeticD unimplemented for MOD.");
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->left()).is(r1));
  ASSERT(ToRegister(instr->right()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // TODO(regis): Implement TypeRecordingBinaryOpStub and replace current
  // GenericBinaryOpStub:
  // TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, r1, r0);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}

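// Branches to left_block on cc and to right_block otherwise, omitting a
// branch whenever its target is the next block in emission order.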
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->input());
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, nz);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->input());
    __ vcmp(reg, 0.0);
    EmitBranch(true_block, false_block, ne);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->input());
    if (instr->hydrogen()->type().IsBoolean()) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test for double values. Zero is false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register core_scratch = r9;
      ASSERT(!reg.is(core_scratch));
      __ ldr(core_scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(core_scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ vcmp(dbl_scratch, 0.0);
      __ b(eq, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, nz);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  // TODO(srdjan): Perform stack overflow check if this goto needs it
  // before jumping.
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    __ jmp(chunk_->GetAssemblyLabel(block));
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  UNIMPLEMENTED();
}


void LCodeGen::DoGoto(LGoto* instr) {
  // TODO(srdjan): Implement deferred stack check.
  EmitGoto(instr->block_id(), NULL);
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToOperand(right));
  Abort("EmitCmpI untested.");
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  Abort("DoCmpID unimplemented.");
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  Abort("DoCmpIDAndBranch unimplemented.");
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->left());
  Register right = ToRegister(instr->right());
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  Abort("DoCmpJSObjectEq untested.");
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Abort("DoCmpJSObjectEqAndBranch unimplemented.");
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Abort("DoIsNull unimplemented.");
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->input());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->temp());
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}


Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  Abort("EmitIsObject unimplemented.");
  return ne;
}


void LCodeGen::DoIsObject(LIsObject* instr) {
  Abort("DoIsObject unimplemented.");
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Abort("DoIsObjectAndBranch unimplemented.");
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Register result = ToRegister(instr->result());
  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  Label done;
  __ b(eq, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->input(), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}


InstanceType LHasInstanceType::TestType() {
  InstanceType from = hydrogen()->from();
  InstanceType to = hydrogen()->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


Condition LHasInstanceType::BranchCondition() {
  InstanceType from = hydrogen()->from();
  InstanceType to = hydrogen()->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Abort("DoHasInstanceType unimplemented.");
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register input = ToRegister(instr->input());
  Register temp = ToRegister(instr->temp());

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, false_label);

  __ CompareObjectType(input, temp, temp, instr->TestType());
  EmitBranch(true_block, false_block, instr->BranchCondition());
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Abort("DoHasCachedArrayIndex unimplemented.");
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Abort("DoHasCachedArrayIndexAndBranch unimplemented.");
}


// Branches to a label or falls through with the answer in the z flag. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  Abort("EmitClassOfTest unimplemented.");
}


void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Abort("DoClassOfTest unimplemented.");
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Abort("DoClassOfTestAndBranch unimplemented.");
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Abort("DoCmpMapAndBranch unimplemented.");
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  // We expect object and function in registers r1 and r0.
  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  Label true_value, done;
  __ tst(r0, r0);
  __ mov(r0, Operand(Factory::false_value()), LeaveCC, eq);
  __ mov(r0, Operand(Factory::true_value()), LeaveCC, ne);
}


void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  Abort("DoInstanceOfAndBranch unimplemented.");
}



static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return no_condition;
  }
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ cmp(r0, Operand(0));
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}


void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Abort("DoCmpTAndBranch unimplemented.");
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
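  // The + 1 accounts for the receiver slot popped along with the parameters.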
  int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}


void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
  Register value = ToRegister(instr->input());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ str(value, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Abort("DoLoadNamedField unimplemented.");
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Abort("DoLoadElements unimplemented.");
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Abort("DoAccessArgumentsAt unimplemented.");
}


void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Abort("DoLoadKeyedFastElement unimplemented.");
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Abort("DoArgumentsElements unimplemented.");
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Abort("DoArgumentsLength unimplemented.");
}


void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Abort("DoApplyArguments unimplemented.");
}


void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->input();
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort("DoPushArgument not implemented for double type.");
  } else {
    Register argument_reg = EmitLoadRegister(argument, ip);
    __ push(argument_reg);
  }
}


void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}


void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(result, FieldMemOperand(result, GlobalObject::kGlobalReceiverOffset));
}


void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr) {
  // Change context if needed.
  bool change_context =
      (graph()->info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to the arguments count if adaptation is not needed. Assumes that
  // r0 is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

1539 RegisterLazyDeoptimization(instr);
1540
1541 // Restore context.
1542 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1543}
1544
1545
1546void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
1547 Abort("DoCallConstantFunction unimplemented.");
1548}
1549
1550
1551void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
1552 Abort("DoDeferredMathAbsTaggedHeapNumber unimplemented.");
1553}
1554
1555
1556void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
1557 Abort("DoMathAbs unimplemented.");
1558}
1559
1560
1561void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
1562 Abort("DoMathFloor unimplemented.");
1563}
1564
1565
1566void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
1567 Abort("DoMathSqrt unimplemented.");
1568}
1569
1570
1571void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
1572 switch (instr->op()) {
1573 case kMathAbs:
1574 DoMathAbs(instr);
1575 break;
1576 case kMathFloor:
1577 DoMathFloor(instr);
1578 break;
1579 case kMathSqrt:
1580 DoMathSqrt(instr);
1581 break;
1582 default:
1583 Abort("Unimplemented type of LUnaryMathOperation.");
1584 UNREACHABLE();
1585 }
1586}
1587
1588
1589void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
1590 Abort("DoCallKeyed unimplemented.");
1591}
1592
1593
1594void LCodeGen::DoCallNamed(LCallNamed* instr) {
1595 ASSERT(ToRegister(instr->result()).is(r0));
1596
1597 int arity = instr->arity();
1598 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
1599 __ mov(r2, Operand(instr->name()));
1600 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1601 // Restore context register.
1602 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1603}
1604
1605
1606void LCodeGen::DoCallFunction(LCallFunction* instr) {
1607 Abort("DoCallFunction unimplemented.");
1608}
1609
1610
1611void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
1612 Abort("DoCallGlobal unimplemented.");
1613}
1614
1615
1616void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
1617 ASSERT(ToRegister(instr->result()).is(r0));
1618 __ mov(r1, Operand(instr->target()));
1619 CallKnownFunction(instr->target(), instr->arity(), instr);
1620}
1621
1622
1623void LCodeGen::DoCallNew(LCallNew* instr) {
1624 ASSERT(ToRegister(instr->input()).is(r1));
1625 ASSERT(ToRegister(instr->result()).is(r0));
1626
1627 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
1628 __ mov(r0, Operand(instr->arity()));
1629 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
1630}
1631
1632
1633void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
1634 CallRuntime(instr->function(), instr->arity(), instr);
1635}
1636
1637
1638void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
1639 Abort("DoStoreNamedField unimplemented.");
1640}
1641
1642
1643void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
1644 ASSERT(ToRegister(instr->object()).is(r1));
1645 ASSERT(ToRegister(instr->value()).is(r0));
1646
1647 // Name is always in r2.
1648 __ mov(r2, Operand(instr->name()));
1649 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1650 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1651}
1652
1653
1654void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
1655 Abort("DoBoundsCheck unimplemented.");
1656}
1657
1658
1659void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
1660 Abort("DoStoreKeyedFastElement unimplemented.");
1661}
1662
1663
1664void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
1665 ASSERT(ToRegister(instr->object()).is(r2));
1666 ASSERT(ToRegister(instr->key()).is(r1));
1667 ASSERT(ToRegister(instr->value()).is(r0));
1668
1669 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1670 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1671}
1672
1673
1674void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
1675 Abort("DoInteger32ToDouble unimplemented.");
1676}
1677
1678
1679void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
1680 class DeferredNumberTagI: public LDeferredCode {
1681 public:
1682 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
1683 : LDeferredCode(codegen), instr_(instr) { }
1684 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
1685 private:
1686 LNumberTagI* instr_;
1687 };
1688
1689 LOperand* input = instr->input();
1690 ASSERT(input->IsRegister() && input->Equals(instr->result()));
1691 Register reg = ToRegister(input);
1692
1693 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
1694 __ SmiTag(reg, SetCC);
1695 __ b(vs, deferred->entry());
1696 __ bind(deferred->exit());
1697}
1698
1699
1700void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
1701 Label slow;
1702 Register reg = ToRegister(instr->input());
1703 DoubleRegister dbl_scratch = d0;
1704 SwVfpRegister flt_scratch = s0;
1705
1706 // Preserve the value of all registers.
1707 __ PushSafepointRegisters();
1708
1709 // There was overflow, so bits 30 and 31 of the original integer
1710 // disagree. Try to allocate a heap number in new space and store
1711 // the value in there. If that fails, call the runtime system.
1712 Label done;
1713 __ SmiUntag(reg);
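  // Untagging arithmetically shifts bit 30 into bit 31; since overflow means
  // bits 30 and 31 differed, flipping bit 31 recovers the original value,
  // e.g. 0x40000000 -> (tag) 0x80000000 -> (untag) 0xc0000000 -> 0x40000000.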
  __ eor(reg, reg, Operand(0x80000000));
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(ip, Operand(0));
  int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
  __ str(ip, MemOperand(sp, reg_stack_index * kPointerSize));

  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Store the value in dbl_scratch into the value field of the
  // allocated heap number.
1741 __ bind(&done);
1742 __ sub(ip, reg, Operand(kHeapObjectTag));
1743 __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
1744 __ str(reg, MemOperand(sp, reg_stack_index * kPointerSize));
1745 __ PopSafepointRegisters();
1746}
1747
1748
1749void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
1750 class DeferredNumberTagD: public LDeferredCode {
1751 public:
1752 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
1753 : LDeferredCode(codegen), instr_(instr) { }
1754 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
1755 private:
1756 LNumberTagD* instr_;
1757 };
1758
1759 DoubleRegister input_reg = ToDoubleRegister(instr->input());
1760 Register reg = ToRegister(instr->result());
1761 Register temp1 = ToRegister(instr->temp1());
1762 Register temp2 = ToRegister(instr->temp2());
1763 Register scratch = r9;
1764
1765 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
1766 if (FLAG_inline_new) {
1767 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
1768 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
1769 } else {
1770 __ jmp(deferred->entry());
1771 }
1772 __ bind(deferred->exit());
1773 __ sub(ip, reg, Operand(kHeapObjectTag));
1774 __ vstr(input_reg, ip, HeapNumber::kValueOffset);
1775}
1776
1777
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  __ PushSafepointRegisters();
  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  int reg_stack_index = __ SafepointRegisterStackIndex(reg.code());
  __ str(r0, MemOperand(sp, reg_stack_index * kPointerSize));
  __ PopSafepointRegisters();
}

void LCodeGen::DoSmiTag(LSmiTag* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ SmiTag(ToRegister(input));
}


void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  Abort("DoSmiUntag unimplemented.");
}

void LCodeGen::EmitNumberUntagD(Register input_reg,
                                DoubleRegister result_reg,
                                LEnvironment* env) {
  Register core_scratch = r9;
  ASSERT(!input_reg.is(core_scratch));
  SwVfpRegister flt_scratch = s0;
  ASSERT(!result_reg.is(d0));

  Label load_smi, heap_number, done;

  // Smi check.
  __ tst(input_reg, Operand(kSmiTagMask));
  __ b(eq, &load_smi);

  // Heap number map check.
  __ ldr(core_scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(core_scratch, Operand(ip));
  __ b(eq, &heap_number);

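  // Neither a smi nor a heap number: undefined is the only other value that
  // can be untagged to a double; anything else deoptimizes.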
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(input_reg, Operand(ip));
  DeoptimizeIf(ne, env);

  // Convert undefined to NaN.
  __ LoadRoot(ip, Heap::kNanValueRootIndex);
  __ sub(ip, ip, Operand(kHeapObjectTag));
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
  __ jmp(&done);

  // Heap number to double register conversion.
  __ bind(&heap_number);
  __ sub(ip, input_reg, Operand(kHeapObjectTag));
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
  __ jmp(&done);

  // Smi to double register conversion.
  __ bind(&load_smi);
  __ SmiUntag(input_reg);  // Untag smi before converting to float.
  __ vmov(flt_scratch, input_reg);
  __ vcvt_f64_s32(result_reg, flt_scratch);
  __ SmiTag(input_reg);  // Retag smi.
  __ bind(&done);
}

class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};

void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Label done;
  Register input_reg = ToRegister(instr->input());
  Register core_scratch = r9;
  ASSERT(!input_reg.is(core_scratch));
  DoubleRegister dbl_scratch = d0;
  SwVfpRegister flt_scratch = s0;
  DoubleRegister dbl_tmp = ToDoubleRegister(instr->temp());

  // Heap number map check.
  __ ldr(core_scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(core_scratch, Operand(ip));

  if (instr->truncating()) {
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
    __ vcmp(dbl_tmp, 0.0);  // Sets overflow bit if NaN.
    __ vcvt_s32_f64(flt_scratch, dbl_tmp);
    __ vmov(input_reg, flt_scratch);  // 32-bit result of conversion.
    __ vmrs(pc);  // Move vector status bits to normal status bits.
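    // With pc as the destination, vmrs copies the FPSCR condition flags
    // (N, Z, C, V) into the APSR, so the integer conditionals below can
    // test the outcome of the vcmp above.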
    // Overflow bit is set if dbl_tmp is NaN.
    __ cmn(input_reg, Operand(1), vc);  // 0x7fffffff + 1 -> overflow.
    __ cmp(input_reg, Operand(1), vc);  // 0x80000000 - 1 -> overflow.
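    // Both checks are predicated on vc: once V is set, by the NaN compare
    // or by either saturated value, the remaining check is skipped and the
    // flag survives to the deoptimization test below.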
    DeoptimizeIf(vs, instr->environment());  // Saturation may have occurred.

  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
    __ vcvt_s32_f64(flt_scratch, dbl_tmp);
    __ vmov(input_reg, flt_scratch);  // 32-bit result of conversion.
    // Non-truncating conversion means that we cannot lose bits, so we convert
    // back to check; note that using non-overlapping s and d regs would be
    // slightly faster.
    __ vcvt_f64_s32(dbl_scratch, flt_scratch);
    __ vcmp(dbl_scratch, dbl_tmp);
    __ vmrs(pc);  // Move vector status bits to normal status bits.
    DeoptimizeIf(ne, instr->environment());  // Not equal or unordered.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ tst(input_reg, Operand(input_reg));
      __ b(ne, &done);
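      // The integer result is zero, which may be a truncated -0.0. Deoptimize
      // if the sign bit (bit 31 of the double's high word) is set.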
      __ vmov(lr, ip, dbl_tmp);
      __ tst(ip, Operand(1 << 31));  // Test sign bit.
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}

void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ tst(input_reg, Operand(kSmiTagMask));
  __ b(ne, deferred->entry());

  // Smi to int32 conversion.
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}

void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  DoubleRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg, instr->environment());
}

void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  Abort("DoDoubleToI unimplemented.");
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister());
  __ tst(ToRegister(input), Operand(kSmiTagMask));
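  // The tst sets eq exactly when the value is a smi, so a condition of eq
  // deoptimizes on a smi and ne deoptimizes on a non-smi.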
  DeoptimizeIf(instr->condition(), instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Abort("DoCheckInstanceType unimplemented.");
}


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->input()->IsRegister());
  Register reg = ToRegister(instr->input());
  __ cmp(reg, Operand(instr->hydrogen()->target()));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(r9, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(r9, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}

void LCodeGen::LoadPrototype(Register result,
                             Handle<JSObject> prototype) {
  Abort("LoadPrototype unimplemented.");
}


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Abort("DoCheckPrototypeMaps unimplemented.");
}


void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  Abort("DoArrayLiteral unimplemented.");
}


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  Abort("DoObjectLiteral unimplemented.");
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Abort("DoRegExpLiteral unimplemented.");
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  Abort("DoFunctionLiteral unimplemented.");
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  Abort("DoTypeof unimplemented.");
}


void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  Abort("DoTypeofIs unimplemented.");
}

void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->input());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}

Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  Register core_scratch = r9;
  ASSERT(!input.is(core_scratch));
  if (type_name->Equals(Heap::number_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::string_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    __ CompareInstanceType(input, core_scratch, FIRST_NONSTRING_TYPE);
    final_branch_condition = lo;

  } else if (type_name->Equals(Heap::boolean_symbol())) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(input, ip);
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::undefined_symbol())) {
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(Heap::function_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ CompareObjectType(input, input, core_scratch, JS_FUNCTION_TYPE);
    __ b(eq, true_label);
    // Regular expressions => 'function' (they are callable).
    __ CompareInstanceType(input, core_scratch, JS_REGEXP_TYPE);
    final_branch_condition = eq;

  } else if (type_name->Equals(Heap::object_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ LoadRoot(ip, Heap::kNullValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    // Regular expressions => 'function', not 'object'.
    __ CompareObjectType(input, input, core_scratch, JS_REGEXP_TYPE);
    __ b(eq, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    // Check for JS objects => true.
    __ CompareInstanceType(input, core_scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, core_scratch, LAST_JS_OBJECT_TYPE);
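    // The lo branch above rejects instance types below FIRST_JS_OBJECT_TYPE;
    // ls here accepts types up to LAST_JS_OBJECT_TYPE, so the two compares
    // together form an inclusive range check.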
    final_branch_condition = ls;

  } else {
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}

void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code is emitted for a lazy bailout instruction. It is used to capture
  // the environment after a call, for populating the safepoint data with
  // deoptimization data.
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}

void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Abort("DoDeleteProperty unimplemented.");
}

void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}

void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  Abort("DoOsrEntry unimplemented.");
}


#undef __

} }  // namespace v8::internal