blob: 976576be6f4cc1a8ce932e14369f034d60bfc09c [file] [log] [blame]
Ben Murdochb8e0da22011-05-16 14:20:40 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Steve Block44f0eee2011-05-26 01:26:41 +010028#include "v8.h"
29
Ben Murdochb0fe1622011-05-05 13:52:32 +010030#include "arm/lithium-codegen-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +010031#include "arm/lithium-gap-resolver-arm.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "code-stubs.h"
33#include "stub-cache.h"
34
35namespace v8 {
36namespace internal {
37
38
Steve Block44f0eee2011-05-26 01:26:41 +010039class SafepointGenerator : public CallWrapper {
Ben Murdochb0fe1622011-05-05 13:52:32 +010040 public:
41 SafepointGenerator(LCodeGen* codegen,
42 LPointerMap* pointers,
43 int deoptimization_index)
44 : codegen_(codegen),
45 pointers_(pointers),
46 deoptimization_index_(deoptimization_index) { }
47 virtual ~SafepointGenerator() { }
48
Ben Murdoch257744e2011-11-30 15:57:28 +000049 virtual void BeforeCall(int call_size) const {
Steve Block44f0eee2011-05-26 01:26:41 +010050 ASSERT(call_size >= 0);
51 // Ensure that we have enough space after the previous safepoint position
52 // for the generated code there.
53 int call_end = codegen_->masm()->pc_offset() + call_size;
54 int prev_jump_end =
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
56 if (call_end < prev_jump_end) {
57 int padding_size = prev_jump_end - call_end;
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
59 while (padding_size > 0) {
60 codegen_->masm()->nop();
61 padding_size -= Assembler::kInstrSize;
62 }
63 }
64 }
65
Ben Murdoch257744e2011-11-30 15:57:28 +000066 virtual void AfterCall() const {
Ben Murdochb0fe1622011-05-05 13:52:32 +010067 codegen_->RecordSafepoint(pointers_, deoptimization_index_);
68 }
69
70 private:
71 LCodeGen* codegen_;
72 LPointerMap* pointers_;
73 int deoptimization_index_;
74};
75
76
77#define __ masm()->
78
// Top-level driver for code generation. Runs the four phases in order
// (prologue, body, deferred code, deopt jump table, safepoint table) and
// stops at the first phase that aborts. Requires VFP3/ARMv7 for the whole
// generated code object.
bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  // Enable VFP3 and ARMv7 instructions for the scope of the whole function.
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  // Short-circuits: a failing phase prevents the later ones from running.
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateDeoptJumpTable() &&
      GenerateSafepointTable();
}
91
92
// Transfers generation results onto the finished Code object: stack slot
// count, safepoint table offset, and the deoptimization data; also reserves
// reloc space for lazy deoptimization patching.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
100
101
// Aborts optimized code generation for this function. Optionally traces the
// printf-style reason (under --trace-bailout) and marks the codegen ABORTED
// so the remaining phases are skipped.
void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
114
115
// Emits a printf-style comment into the generated code stream when
// --code-comments is enabled. The formatted text is copied to a heap
// vector because the assembler keeps the pointer past this call.
void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}
132
133
// Emits the function prologue: optional debugger stop, receiver fix-up for
// strict-mode/native functions, standard frame construction, stack slot
// reservation (zapped in debug mode), optional heap-allocated local context,
// and the --trace entry hook. Emission order here is load-bearing; do not
// reorder.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // Allow breaking into a specific function with --stop_at.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // On entry:
  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). r5 is zero for method calls and non-zero for
  // function calls.
  if (info_->is_strict_mode() || info_->is_native()) {
    Label ok;
    __ cmp(r5, Operand(0));
    __ b(eq, &ok);
    // Receiver sits just above the parameters on the caller's stack.
    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Build the standard frame: push function, context, caller fp and lr.
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Fill every reserved slot with the zap value to catch reads of
      // uninitialized slots.
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both r0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
226
227
228bool LCodeGen::GenerateBody() {
229 ASSERT(is_generating());
230 bool emit_instructions = true;
231 for (current_instruction_ = 0;
232 !is_aborted() && current_instruction_ < instructions_->length();
233 current_instruction_++) {
234 LInstruction* instr = instructions_->at(current_instruction_);
235 if (instr->IsLabel()) {
236 LLabel* label = LLabel::cast(instr);
237 emit_instructions = !label->HasReplacement();
238 }
239
240 if (emit_instructions) {
241 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
242 instr->CompileToNative(this);
243 }
244 }
245 return !is_aborted();
246}
247
248
249LInstruction* LCodeGen::GetNextInstruction() {
250 if (current_instruction_ < instructions_->length() - 1) {
251 return instructions_->at(current_instruction_ + 1);
252 } else {
253 return NULL;
254 }
255}
256
257
// Emits the out-of-line (deferred) code sections collected during the body
// pass, pads after them so the last safepoint can be patched for lazy
// bailout, and flushes the constant pool so none is emitted later (e.g.
// into the deopt jump table).
bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      code->Generate();
      // Jump back to the main instruction stream.
      __ jmp(code->exit());
    }

    // Pad the code to ensure that the last piece of deferred code has
    // room for the lazy bailout patch.
    while ((masm()->pc_offset() - LastSafepointEnd())
           < Deoptimizer::patch_size()) {
      __ nop();
    }
  }

  // Force constant pool emission at the end of the deferred code to make
  // sure that no constant pools are emitted after.
  masm()->CheckConstPool(true, false);

  return !is_aborted();
}
282
283
// Emits the deoptimization jump table: one pc-relative load per entry that
// jumps through an inlined 32-bit target address. Verifies up front that
// every branch into the table can be encoded, and blocks the constant pool
// so the ldr/dd pairs stay contiguous.
bool LCodeGen::GenerateDeoptJumpTable() {
  // Check that the jump table is accessible from everywhere in the function
  // code, ie that offsets to the table can be encoded in the 24bit signed
  // immediate of a branch instruction.
  // To simplify we consider the code size from the first instruction to the
  // end of the jump table. We also don't consider the pc load delta.
  // Each entry in the jump table generates one instruction and inlines one
  // 32bit data after it.
  if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
      deopt_jump_table_.length() * 2)) {
    Abort("Generated code is too large");
  }

  // Block the constant pool emission during the jump table emission.
  __ BlockConstPoolFor(deopt_jump_table_.length());
  __ RecordComment("[ Deoptimisation jump table");
  Label table_start;
  __ bind(&table_start);
  for (int i = 0; i < deopt_jump_table_.length(); i++) {
    __ bind(&deopt_jump_table_[i].label);
    // Load pc from the word emitted directly after this instruction.
    __ ldr(pc, MemOperand(pc, Assembler::kInstrSize - Assembler::kPcLoadDelta));
    __ dd(reinterpret_cast<uint32_t>(deopt_jump_table_[i].address));
  }
  // Each entry must be exactly two instruction slots (ldr + data word).
  ASSERT(masm()->InstructionsGeneratedSince(&table_start) ==
      deopt_jump_table_.length() * 2);
  __ RecordComment("]");

  // The deoptimization jump table is the last part of the instruction
  // sequence. Mark the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
316
317
// Emits the safepoint table after all code; it needs the final stack slot
// count to describe the frame layout.
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
323
324
// Maps a register-allocator index to the corresponding core register.
Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}
328
329
// Maps a register-allocator index to the corresponding VFP double register.
DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}
333
334
// Returns the core register assigned to |op|, which must be a register
// operand.
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}
339
340
// Materializes |op| in a core register. Register operands are returned
// directly; constants and stack/argument slots are loaded into |scratch|
// (emitting code) and |scratch| is returned.
Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}
354
355
// Returns the double register assigned to |op|, which must be a double
// register operand.
DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}
360
361
// Materializes |op| in a double register. Double register operands are
// returned directly; int32 constants are converted via |flt_scratch| into
// |dbl_scratch|; stack slots are loaded into |dbl_scratch|. Double and
// tagged immediates are not supported and abort. Clobbers ip for the
// constant path.
DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      // Move the int32 into a core register, then convert to double.
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}
392
393
// Returns the int32 value of a constant operand. The constant must have an
// Integer32 representation and fit exactly in 32 bits (checked in debug).
int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}
401
402
// Converts a Lithium operand to an assembler Operand. Handles int32 and
// tagged constants and registers; double immediates, double registers and
// stack slots are unsupported here (use ToMemOperand for slots).
Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}
426
427
428MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100429 ASSERT(!op->IsRegister());
430 ASSERT(!op->IsDoubleRegister());
431 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
432 int index = op->index();
433 if (index >= 0) {
434 // Local or spill slot. Skip the frame pointer, function, and
435 // context in the fixed part of the frame.
436 return MemOperand(fp, -(index + 3) * kPointerSize);
437 } else {
438 // Incoming parameter. Skip the return address.
439 return MemOperand(fp, -(index - 1) * kPointerSize);
440 }
441}
442
443
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100444MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
445 ASSERT(op->IsDoubleStackSlot());
446 int index = op->index();
447 if (index >= 0) {
448 // Local or spill slot. Skip the frame pointer, function, context,
449 // and the first word of the double in the fixed part of the frame.
450 return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
451 } else {
452 // Incoming parameter. Skip the return address and the first word of
453 // the double.
454 return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
455 }
456}
457
458
// Serializes an environment chain into a deoptimization translation.
// Recurses outermost-frame-first, then emits one frame record followed by
// one command per environment value. Values whose register was spilled are
// first emitted as duplicates pointing at the spill location.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  // Outer frames are written before this one.
  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    // The value itself (register, slot, constant, or missing).
    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
496
497
// Appends one translation command describing where |op|'s value lives
// (stack slot, register, double, literal) and whether it is tagged.
// A NULL operand stands for the not-yet-materialized arguments object.
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    // Arguments live above the spill slots in the frame.
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    // Constants are referenced by their index in the literals array.
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
536
537
// Calls a code object and records a simple (no registers) safepoint.
// Convenience wrapper around CallCodeGeneric.
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}
543
544
// Calls a code object, recording the source position, the safepoint of the
// requested kind, and lazy-deopt bookkeeping. For binary-op/compare ICs a
// trailing nop marks the call site as "no inlined smi code" for the
// patching machinery.
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}
562
563
// Calls a runtime function, recording position and lazy-deopt safepoint.
// Arguments must already be set up per the runtime calling convention.
void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}
575
576
// Runtime call variant for deferred code: doubles are saved around the call
// and a registers-safepoint with no deopt index is recorded.
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}
584
585
586void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
587 SafepointMode safepoint_mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100588 // Create the environment to bailout to. If the call has side effects
589 // execution has to continue after the call otherwise execution can continue
590 // from a previous bailout point repeating the call.
591 LEnvironment* deoptimization_environment;
592 if (instr->HasDeoptimizationEnvironment()) {
593 deoptimization_environment = instr->deoptimization_environment();
594 } else {
595 deoptimization_environment = instr->environment();
596 }
597
598 RegisterEnvironmentForDeoptimization(deoptimization_environment);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100599 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
600 RecordSafepoint(instr->pointer_map(),
601 deoptimization_environment->deoptimization_index());
602 } else {
603 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
604 RecordSafepointWithRegisters(
605 instr->pointer_map(),
606 0,
607 deoptimization_environment->deoptimization_index());
608 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100609}
610
611
// Assigns the environment a deoptimization index and serializes its
// translation, unless it was registered already. Idempotent per environment.
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    // Count the frames in the environment chain (inlined functions).
    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}
638
639
// Emits a conditional jump to the eager deoptimization entry for
// |environment|. Unconditional deopts jump directly; conditional ones
// branch through the shared deopt jump table (reusing the last entry when
// the target repeats). Honors --trap-on-deopt and --deopt-every-n-times
// (only 0/1 supported on ARM).
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  // Release-mode guard: the ASSERT above compiles away outside debug builds.
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (FLAG_trap_on_deopt) __ stop("trap_on_deopt", cc);

  if (cc == al) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (deopt_jump_table_.is_empty() ||
        (deopt_jump_table_.last().address != entry)) {
      deopt_jump_table_.Add(JumpTableEntry(entry));
    }
    __ b(cc, &deopt_jump_table_.last().label);
  }
}
673
674
// Builds the DeoptimizationInputData for the code object from the
// environments registered during codegen: translation byte array, literal
// array, OSR info, and one (ast id, translation, arguments height) entry
// per registered environment. No-op when nothing was registered.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}
706
707
708int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
709 int result = deoptimization_literals_.length();
710 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
711 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
712 }
713 deoptimization_literals_.Add(literal);
714 return result;
715}
716
717
718void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
719 ASSERT(deoptimization_literals_.length() == 0);
720
721 const ZoneList<Handle<JSFunction> >* inlined_closures =
722 chunk()->inlined_closures();
723
724 for (int i = 0, length = inlined_closures->length();
725 i < length;
726 i++) {
727 DefineDeoptimizationLiteral(inlined_closures->at(i));
728 }
729
730 inlined_function_count_ = deoptimization_literals_.length();
731}
732
733
// Core safepoint recorder: defines a safepoint of the given kind at the
// current pc and marks every pointer-holding stack slot (and, for
// register-kind safepoints, every pointer-holding register plus cp).
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}
757
758
// Records a simple safepoint (no saved registers, no arguments).
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}
763
764
// Records a simple safepoint with an empty pointer map (no live pointers).
void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}
769
770
// Records a safepoint where core registers are saved on the stack.
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}
777
778
// Records a safepoint where both core and double registers are saved.
void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
                  deoptimization_index);
}
786
787
// Records a source position with the assembler; ignores the no-position
// sentinel.
void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}
792
793
// Binds a basic-block label, emits a block comment, tracks the current
// block id, and compiles the gap moves attached to the label.
void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}
804
805
// Emits code for a parallel move via the gap resolver.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
809
810
// Emits all parallel moves of a gap, in inner-position order. If the next
// instruction is a lazy bailout, records the pc after the gap so the
// safepoint covers the moves.
void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}
826
827
// An instruction gap is handled exactly like a block-entry gap.
void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}
831
832
// Parameters are already in their allocated locations on function
// entry, so no code needs to be emitted for them.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
836
837
// Calls the code stub selected by the instruction's major key. All of
// these stubs return their result in r0 (asserted below). The
// TranscendentalCache stub additionally expects its tagged argument in
// r0, which is loaded here from the top of the stack.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // The stub takes its (tagged) argument in r0.
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
882
883
// No code is needed for an unknown OSR value; it is presumably already
// in its allocated location when entering via on-stack replacement.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
887
888
// Emits code for integer modulus (left % right).
// Fast paths:
//  - a known power-of-two divisor is handled with bit masking (with
//    sign fix-up and optional -0 deoptimization for negative dividends);
//  - small nonnegative operands are handled by an unrolled subtraction
//    loop (kUnfolds iterations);
//  - otherwise the remainder is computed with VFP double arithmetic.
// Deoptimizes on division by zero (when the instruction allows it) and
// on a -0 result when kBailoutOnMinusZero is set.
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));
    Register result = ToRegister(instr->result());

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    // Only the magnitude of the divisor matters for the remainder.
    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    // Negative dividend: compute |dividend| & (divisor - 1), then negate.
    __ rsb(result, dividend, Operand(0));
    __ and_(result, result, Operand(divisor - 1), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero remainder of a negative dividend would be -0.
      DeoptimizeIf(eq, instr->environment());
    }
    __ rsb(result, result, Operand(0));
    __ b(&done);
    __ bind(&positive_dividend);
    __ and_(result, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  __ Move(result, left);

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  __ b(lt, &vfp_modulo);

  // left < right: the remainder is left itself (already in result).
  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the right hand side is smaller than the (nonnegative)
  // left hand side, the left hand side is the result.
  // Else try a few subtractions of the left hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less or equal than the
    // right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value is preloaded before. Be careful that 'right' is only live
  // on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}
1024
1025
// Emits code for integer division (left / right), deoptimizing whenever
// the result would not be a representable integer: division by zero,
// (0 / -x) producing -0, kMinInt / -1 overflow, or a quotient with a
// remainder. Divisors 1, 2 and 4 are handled inline with shifts; all
// other cases call the generic binary-op stub through deferred code,
// with smi-tagging of the operands and untagging of the r0 result.
void LCodeGen::DoDivI(LDivI* instr) {
  // Deferred path: perform the division via the generic BinaryOpStub.
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first. Each shift applies only when the
  // low bits are clear, i.e. when the division is exact.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged to Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}
1104
1105
// Deferred-code helper that performs a binary operation by calling the
// generic BinaryOpStub. The operands are shuffled into the stub's
// calling convention (left in r1, right in r0, result in r0) without
// clobbering either before it is read, a safepoint with registers and
// doubles is recorded for the call, and the stub's r0 result is written
// back into the saved-register slot for r0.
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    // Swap via r2 so neither operand is lost.
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  BinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
1135
1136
// Emits code for integer multiplication. Non-overflowing multiplication
// by a constant uses strength reduction (negation, shifts, shift-and-add
// for 2^n +/- 1); otherwise a plain mul, or smull with an overflow check
// when the instruction can overflow. Deoptimizes on overflow and on a
// -0 result when kBailoutOnMinusZero is set.
void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());
  // Note that result may alias left.
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right_op = instr->InputAt(1);

  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  bool bailout_on_minus_zero =
      instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);

  if (right_op->IsConstantOperand() && !can_overflow) {
    // Use optimized code for specific constants.
    int32_t constant = ToInteger32(LConstantOperand::cast(right_op));

    if (bailout_on_minus_zero && (constant < 0)) {
      // The case of a zero constant is handled separately below.
      // If the constant is negative and left is zero, the result is -0.
      __ cmp(left, Operand(0));
      DeoptimizeIf(eq, instr->environment());
    }

    switch (constant) {
      case -1:
        __ rsb(result, left, Operand(0));
        break;
      case 0:
        if (bailout_on_minus_zero) {
          // If left is strictly negative and the constant is zero, the
          // result is -0. Deoptimize if required, otherwise return 0.
          __ cmp(left, Operand(0));
          DeoptimizeIf(mi, instr->environment());
        }
        __ mov(result, Operand(0));
        break;
      case 1:
        __ Move(result, left);
        break;
      default:
        // Multiplying by powers of two and powers of two plus or minus
        // one can be done faster with shifted operands.
        // For other constants we emit standard code.
        int32_t mask = constant >> 31;
        uint32_t constant_abs = (constant + mask) ^ mask;  // |constant|

        if (IsPowerOf2(constant_abs) ||
            IsPowerOf2(constant_abs - 1) ||
            IsPowerOf2(constant_abs + 1)) {
          if (IsPowerOf2(constant_abs)) {
            int32_t shift = WhichPowerOf2(constant_abs);
            __ mov(result, Operand(left, LSL, shift));
          } else if (IsPowerOf2(constant_abs - 1)) {
            int32_t shift = WhichPowerOf2(constant_abs - 1);
            __ add(result, left, Operand(left, LSL, shift));
          } else if (IsPowerOf2(constant_abs + 1)) {
            int32_t shift = WhichPowerOf2(constant_abs + 1);
            __ rsb(result, left, Operand(left, LSL, shift));
          }

          // Correct the sign of the result if the constant is negative.
          if (constant < 0) __ rsb(result, result, Operand(0));

        } else {
          // Generate standard code.
          __ mov(ip, Operand(constant));
          __ mul(result, left, ip);
        }
    }

  } else {
    Register right = EmitLoadRegister(right_op, scratch);
    if (bailout_on_minus_zero) {
      // Remember the combined sign bits for the -0 check below.
      __ orr(ToRegister(instr->TempAt(0)), left, right);
    }

    if (can_overflow) {
      // scratch:result = left * right.
      __ smull(result, scratch, left, right);
      // Overflow iff the high word is not the sign extension of the low.
      __ cmp(scratch, Operand(result, ASR, 31));
      DeoptimizeIf(ne, instr->environment());
    } else {
      __ mul(result, left, right);
    }

    if (bailout_on_minus_zero) {
      // Bail out if the result is supposed to be negative zero.
      Label done;
      __ cmp(result, Operand(0));
      __ b(ne, &done);
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
      __ bind(&done);
    }
  }
}
1232
1233
// Emits a bitwise AND/OR/XOR of two integers. The left operand must be
// a register; the right operand may be a register, a constant, or a
// memory operand (stack slot / argument), which is first loaded into ip.
void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left_op = instr->InputAt(0);
  LOperand* right_op = instr->InputAt(1);
  ASSERT(left_op->IsRegister());
  Register left = ToRegister(left_op);
  Register result = ToRegister(instr->result());
  Operand right(no_reg);

  if (right_op->IsStackSlot() || right_op->IsArgument()) {
    // Memory operand: materialize it in ip before the ALU op.
    right = Operand(EmitLoadRegister(right_op, ip));
  } else {
    ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
    right = ToOperand(right_op);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, left, right);
      break;
    case Token::BIT_OR:
      __ orr(result, left, right);
      break;
    case Token::BIT_XOR:
      __ eor(result, left, right);
      break;
    default:
      UNREACHABLE();
      break;
  }
}
1264
1265
// Emits a shift (SAR / SHR / SHL) of an integer by either a register
// amount or a constant amount, with the shift count masked to 0-31 as
// required by JavaScript semantics. A logical right shift deoptimizes
// (when instr->can_deopt()) if the result would have the sign bit set,
// since such a value is not representable as an int32.
void LCodeGen::DoShiftI(LShiftI* instr) {
  // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
  // result may alias either of them.
  LOperand* right_op = instr->InputAt(1);
  Register left = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  if (right_op->IsRegister()) {
    // Mask the right_op operand.
    __ and_(scratch, ToRegister(right_op), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(left, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          // SetCC lets us detect a negative (sign-bit-set) result.
          __ mov(result, Operand(left, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(left, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(left, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Mask the right_op operand.
    int value = ToInteger32(LConstantOperand::cast(right_op));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, ASR, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSR, shift_count));
        } else {
          // Zero-count SHR: the value itself must fit in an int32.
          if (instr->can_deopt()) {
            __ tst(left, Operand(0x80000000));
            DeoptimizeIf(ne, instr->environment());
          }
          __ Move(result, left);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSL, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1331
1332
1333void LCodeGen::DoSubI(LSubI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01001334 LOperand* left = instr->InputAt(0);
1335 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001336 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001337 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1338 SBit set_cond = can_overflow ? SetCC : LeaveCC;
1339
1340 if (right->IsStackSlot() || right->IsArgument()) {
1341 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001342 __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001343 } else {
1344 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001345 __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001346 }
1347
1348 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001349 DeoptimizeIf(vs, instr->environment());
1350 }
1351}
1352
1353
1354void LCodeGen::DoConstantI(LConstantI* instr) {
1355 ASSERT(instr->result()->IsRegister());
1356 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1357}
1358
1359
1360void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001361 ASSERT(instr->result()->IsDoubleRegister());
1362 DwVfpRegister result = ToDoubleRegister(instr->result());
1363 double v = instr->value();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001364 __ Vmov(result, v);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001365}
1366
1367
1368void LCodeGen::DoConstantT(LConstantT* instr) {
1369 ASSERT(instr->result()->IsRegister());
1370 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1371}
1372
1373
Steve Block9fac8402011-05-12 15:51:54 +01001374void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001375 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001376 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001377 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
1378}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001379
Ben Murdochb0fe1622011-05-05 13:52:32 +01001380
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001381void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001382 Register result = ToRegister(instr->result());
1383 Register array = ToRegister(instr->InputAt(0));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001384 __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001385}
1386
1387
// Extracts the elements kind of an object from its map's bit field 2
// into the result register.
void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ ldr(result, FieldMemOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
}
1400
1401
// Implements ValueOf: for a JSValue wrapper object the wrapped value is
// loaded; smis and objects that are not JSValues are returned unchanged.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ Move(result, input, eq);
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ Move(result, input, ne);
  __ b(ne, &done);
  // Otherwise load the wrapped value.
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1421
1422
1423void LCodeGen::DoBitNotI(LBitNotI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001424 Register input = ToRegister(instr->InputAt(0));
1425 Register result = ToRegister(instr->result());
1426 __ mvn(result, Operand(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001427}
1428
1429
1430void LCodeGen::DoThrow(LThrow* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001431 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001432 __ push(input_reg);
1433 CallRuntime(Runtime::kThrow, 1, instr);
1434
1435 if (FLAG_debug_code) {
1436 __ stop("Unreachable code.");
1437 }
1438}
1439
1440
1441void LCodeGen::DoAddI(LAddI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001442 LOperand* left = instr->InputAt(0);
1443 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001444 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001445 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1446 SBit set_cond = can_overflow ? SetCC : LeaveCC;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001447
Steve Block44f0eee2011-05-26 01:26:41 +01001448 if (right->IsStackSlot() || right->IsArgument()) {
1449 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001450 __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001451 } else {
1452 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001453 __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001454 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001455
Steve Block44f0eee2011-05-26 01:26:41 +01001456 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001457 DeoptimizeIf(vs, instr->environment());
1458 }
1459}
1460
1461
// Emits a double-precision arithmetic operation using VFP instructions.
// MOD has no VFP instruction and is implemented by calling out to a C
// helper (double_fp_operation), saving and restoring the caller-saved
// r0-r3 around the call.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  DoubleRegister result = ToDoubleRegister(instr->result());
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(result, left, right);
      break;
    case Token::SUB:
      __ vsub(result, left, right);
      break;
    case Token::MUL:
      __ vmul(result, left, right);
      break;
    case Token::DIV:
      __ vdiv(result, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      // Two double arguments, no integer arguments.
      __ PrepareCallCFunction(0, 2, scratch0());
      __ SetCallCDoubleArguments(left, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          0, 2);
      // Move the result in the double result register.
      __ GetCFunctionDoubleResult(result);

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
1500
1501
// Emits a call to the generic BinaryOpStub for a tagged binary
// operation. The inputs are fixed to r1 (left) and r0 (right), and the
// result arrives in r0.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}
1511
1512
1513int LCodeGen::GetNextEmittedBlock(int block) {
1514 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1515 LLabel* label = chunk_->GetLabel(i);
1516 if (!label->HasReplacement()) return i;
1517 }
1518 return -1;
1519}
1520
1521
// Emits a conditional two-way branch, eliding the jump to whichever
// destination is the block emitted immediately after the current one.
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    // Both arms go to the same place: an unconditional goto suffices.
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // Fall through to the left block; branch only when cc fails.
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // Fall through to the right block; branch when cc holds.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    // Neither destination follows: branch to left, then jump to right.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}
1538
1539
// Emits the ToBoolean branch for an arbitrary value, dispatching on the
// value's known representation. Integers and doubles are tested inline;
// for tagged values only the types observed at run time are handled
// inline, and anything not seen before triggers a deoptimization.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    // Integer: zero is false, everything else is true.
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      // Known boolean: comparing against 'true' alone is sufficient.
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, eq);
    } else if (type.IsSmi()) {
      // Known smi: only the zero smi is false.
      __ cmp(reg, Operand(0));
      EmitBranch(true_block, false_block, ne);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
        __ b(eq, false_label);
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // Boolean -> its value.
        __ CompareRoot(reg, Heap::kTrueValueRootIndex);
        __ b(eq, true_label);
        __ CompareRoot(reg, Heap::kFalseValueRootIndex);
        __ b(eq, false_label);
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ CompareRoot(reg, Heap::kNullValueRootIndex);
        __ b(eq, false_label);
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ cmp(reg, Operand(0));
        __ b(eq, false_label);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ tst(reg, Operand(kSmiTagMask));
        DeoptimizeIf(eq, instr->environment());
      }

      // From here on only non-smi heap objects remain.
      const Register map = scratch0();
      if (expected.NeedsMap()) {
        __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));

        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
          __ tst(ip, Operand(1 << Map::kIsUndetectable));
          __ b(ne, false_label);
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // spec object -> true.
        __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
        __ b(ge, true_label);
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
        __ b(ge, &not_string);
        __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
        __ cmp(ip, Operand(0));
        __ b(ne, true_label);
        __ b(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        DoubleRegister dbl_scratch = double_scratch0();
        Label not_heap_number;
        __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
        __ b(ne, &not_heap_number);
        __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
        __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
        __ b(vs, false_label);  // NaN -> false.
        __ b(eq, false_label);  // +0, -0 -> false.
        __ b(true_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(al, instr->environment());
    }
  }
}
1653
1654
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001655void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001656 block = chunk_->LookupDestination(block);
1657 int next_block = GetNextEmittedBlock(current_block_);
1658 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001659 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001660 }
1661}
1662
1663
Ben Murdochb0fe1622011-05-05 13:52:32 +01001664void LCodeGen::DoGoto(LGoto* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001665 EmitGoto(instr->block_id());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001666}
1667
1668
1669Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001670 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001671 switch (op) {
1672 case Token::EQ:
1673 case Token::EQ_STRICT:
1674 cond = eq;
1675 break;
1676 case Token::LT:
1677 cond = is_unsigned ? lo : lt;
1678 break;
1679 case Token::GT:
1680 cond = is_unsigned ? hi : gt;
1681 break;
1682 case Token::LTE:
1683 cond = is_unsigned ? ls : le;
1684 break;
1685 case Token::GTE:
1686 cond = is_unsigned ? hs : ge;
1687 break;
1688 case Token::IN:
1689 case Token::INSTANCEOF:
1690 default:
1691 UNREACHABLE();
1692 }
1693 return cond;
1694}
1695
1696
1697void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
Steve Block1e0659c2011-05-24 12:43:12 +01001698 __ cmp(ToRegister(left), ToRegister(right));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001699}
1700
1701
Ben Murdochb0fe1622011-05-05 13:52:32 +01001702void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001703 LOperand* left = instr->InputAt(0);
1704 LOperand* right = instr->InputAt(1);
1705 int false_block = chunk_->LookupDestination(instr->false_block_id());
1706 int true_block = chunk_->LookupDestination(instr->true_block_id());
1707
1708 if (instr->is_double()) {
1709 // Compare left and right as doubles and load the
1710 // resulting flags into the normal status register.
1711 __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
1712 // If a NaN is involved, i.e. the result is unordered (V set),
1713 // jump to false block label.
1714 __ b(vs, chunk_->GetAssemblyLabel(false_block));
1715 } else {
1716 EmitCmpI(left, right);
1717 }
1718
1719 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1720 EmitBranch(true_block, false_block, cc);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001721}
1722
1723
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001724void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001725 Register left = ToRegister(instr->InputAt(0));
1726 Register right = ToRegister(instr->InputAt(1));
1727 int false_block = chunk_->LookupDestination(instr->false_block_id());
1728 int true_block = chunk_->LookupDestination(instr->true_block_id());
1729
1730 __ cmp(left, Operand(right));
1731 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001732}
1733
1734
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001735void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001736 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00001737 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001738 int false_block = chunk_->LookupDestination(instr->false_block_id());
Ben Murdoch257744e2011-11-30 15:57:28 +00001739
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001740 __ cmp(left, Operand(instr->hydrogen()->right()));
Ben Murdoch257744e2011-11-30 15:57:28 +00001741 EmitBranch(true_block, false_block, eq);
1742}
1743
1744
Ben Murdochb0fe1622011-05-05 13:52:32 +01001745void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001746 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001747 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001748
1749 // TODO(fsc): If the expression is known to be a smi, then it's
1750 // definitely not null. Jump to the false block.
1751
1752 int true_block = chunk_->LookupDestination(instr->true_block_id());
1753 int false_block = chunk_->LookupDestination(instr->false_block_id());
1754
1755 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1756 __ cmp(reg, ip);
1757 if (instr->is_strict()) {
1758 EmitBranch(true_block, false_block, eq);
1759 } else {
1760 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1761 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1762 __ b(eq, true_label);
1763 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1764 __ cmp(reg, ip);
1765 __ b(eq, true_label);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001766 __ JumpIfSmi(reg, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001767 // Check for undetectable objects by looking in the bit field in
1768 // the map. The object has already been smi checked.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001769 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1770 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1771 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1772 EmitBranch(true_block, false_block, ne);
1773 }
1774}
1775
1776
// Tests whether |input| is an object for the IsObject predicate:
// smis and undetectable objects jump to |is_not_object|, null jumps to
// |is_object|, and everything else falls through with the flags set so
// that the returned condition (le) holds iff the instance type is within
// the non-callable spec-object range. Clobbers |temp1| and scratch0();
// |input| is preserved.
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Label* is_not_object,
                                 Label* is_object) {
  Register temp2 = scratch0();
  __ JumpIfSmi(input, is_not_object);

  // null counts as an object here.
  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
  __ cmp(input, temp2);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  return le;
}
1802
1803
Ben Murdochb0fe1622011-05-05 13:52:32 +01001804void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001805 Register reg = ToRegister(instr->InputAt(0));
1806 Register temp1 = ToRegister(instr->TempAt(0));
Steve Block1e0659c2011-05-24 12:43:12 +01001807
1808 int true_block = chunk_->LookupDestination(instr->true_block_id());
1809 int false_block = chunk_->LookupDestination(instr->false_block_id());
1810 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1811 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1812
1813 Condition true_cond =
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001814 EmitIsObject(reg, temp1, false_label, true_label);
Steve Block1e0659c2011-05-24 12:43:12 +01001815
1816 EmitBranch(true_block, false_block, true_cond);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001817}
1818
1819
Ben Murdochb0fe1622011-05-05 13:52:32 +01001820void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1821 int true_block = chunk_->LookupDestination(instr->true_block_id());
1822 int false_block = chunk_->LookupDestination(instr->false_block_id());
1823
Steve Block1e0659c2011-05-24 12:43:12 +01001824 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001825 __ tst(input_reg, Operand(kSmiTagMask));
1826 EmitBranch(true_block, false_block, eq);
1827}
1828
1829
Ben Murdoch257744e2011-11-30 15:57:28 +00001830void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1831 Register input = ToRegister(instr->InputAt(0));
1832 Register temp = ToRegister(instr->TempAt(0));
1833
1834 int true_block = chunk_->LookupDestination(instr->true_block_id());
1835 int false_block = chunk_->LookupDestination(instr->false_block_id());
1836
1837 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
1838 __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
1839 __ ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
1840 __ tst(temp, Operand(1 << Map::kIsUndetectable));
1841 EmitBranch(true_block, false_block, ne);
1842}
1843
1844
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001845static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001846 InstanceType from = instr->from();
1847 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001848 if (from == FIRST_TYPE) return to;
1849 ASSERT(from == to || to == LAST_TYPE);
1850 return from;
1851}
1852
1853
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001854static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001855 InstanceType from = instr->from();
1856 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001857 if (from == to) return eq;
1858 if (to == LAST_TYPE) return hs;
1859 if (from == FIRST_TYPE) return ls;
1860 UNREACHABLE();
1861 return eq;
1862}
1863
1864
Ben Murdochb0fe1622011-05-05 13:52:32 +01001865void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001866 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001867 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001868
1869 int true_block = chunk_->LookupDestination(instr->true_block_id());
1870 int false_block = chunk_->LookupDestination(instr->false_block_id());
1871
1872 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1873
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001874 __ JumpIfSmi(input, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001875
Steve Block1e0659c2011-05-24 12:43:12 +01001876 __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
1877 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001878}
1879
1880
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001881void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1882 Register input = ToRegister(instr->InputAt(0));
1883 Register result = ToRegister(instr->result());
1884
1885 if (FLAG_debug_code) {
1886 __ AbortIfNotString(input);
1887 }
1888
1889 __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
1890 __ IndexFromHash(result, result);
1891}
1892
1893
Ben Murdochb0fe1622011-05-05 13:52:32 +01001894void LCodeGen::DoHasCachedArrayIndexAndBranch(
1895 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001896 Register input = ToRegister(instr->InputAt(0));
1897 Register scratch = scratch0();
1898
1899 int true_block = chunk_->LookupDestination(instr->true_block_id());
1900 int false_block = chunk_->LookupDestination(instr->false_block_id());
1901
1902 __ ldr(scratch,
1903 FieldMemOperand(input, String::kHashFieldOffset));
1904 __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
1905 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001906}
1907
1908
Ben Murdochb8e0da22011-05-16 14:20:40 +01001909// Branches to a label or falls through with the answer in flags. Trashes
Ben Murdochb0fe1622011-05-05 13:52:32 +01001910// the temp registers, but not the input. Only input and temp2 may alias.
1911void LCodeGen::EmitClassOfTest(Label* is_true,
1912 Label* is_false,
1913 Handle<String>class_name,
1914 Register input,
1915 Register temp,
1916 Register temp2) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001917 ASSERT(!input.is(temp));
1918 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001919 __ JumpIfSmi(input, is_false);
1920 __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001921 __ b(lt, is_false);
1922
1923 // Map is now in temp.
1924 // Functions have class 'Function'.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001925 __ CompareInstanceType(temp, temp2, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001926 if (class_name->IsEqualTo(CStrVector("Function"))) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001927 __ b(ge, is_true);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001928 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001929 __ b(ge, is_false);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001930 }
1931
1932 // Check if the constructor in the map is a function.
1933 __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
1934
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001935 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type and
1936 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1937 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1938 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1939 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1940 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001941
1942 // Objects with a non-function constructor have class 'Object'.
1943 __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
1944 if (class_name->IsEqualTo(CStrVector("Object"))) {
1945 __ b(ne, is_true);
1946 } else {
1947 __ b(ne, is_false);
1948 }
1949
1950 // temp now contains the constructor function. Grab the
1951 // instance class name from there.
1952 __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1953 __ ldr(temp, FieldMemOperand(temp,
1954 SharedFunctionInfo::kInstanceClassNameOffset));
1955 // The class name we are testing against is a symbol because it's a literal.
1956 // The name in the constructor is a symbol because of the way the context is
1957 // booted. This routine isn't expected to work for random API-created
1958 // classes and it doesn't have to because you can't access it with natives
1959 // syntax. Since both sides are symbols it is sufficient to use an identity
1960 // comparison.
1961 __ cmp(temp, Operand(class_name));
1962 // End with the answer in flags.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001963}
1964
1965
Ben Murdochb0fe1622011-05-05 13:52:32 +01001966void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001967 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001968 Register temp = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001969 Register temp2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001970 Handle<String> class_name = instr->hydrogen()->class_name();
1971
1972 int true_block = chunk_->LookupDestination(instr->true_block_id());
1973 int false_block = chunk_->LookupDestination(instr->false_block_id());
1974
1975 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1976 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1977
1978 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1979
1980 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001981}
1982
1983
// Branches on whether the object's map is exactly the expected map.
// Note the raw block ids are passed to EmitBranch, which performs the
// LookupDestination resolution itself.
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}
1994
1995
// Generates a generic instanceof check via the InstanceofStub and
// materializes a boolean object in r0: a zero stub result selects the
// true value, any other result the false value (conditional moves leave
// the flags untouched).
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
2007
2008
Ben Murdoch086aeea2011-05-13 15:57:08 +01002009void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002010 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2011 public:
2012 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2013 LInstanceOfKnownGlobal* instr)
2014 : LDeferredCode(codegen), instr_(instr) { }
2015 virtual void Generate() {
2016 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
2017 }
2018
2019 Label* map_check() { return &map_check_; }
2020
2021 private:
2022 LInstanceOfKnownGlobal* instr_;
2023 Label map_check_;
2024 };
2025
2026 DeferredInstanceOfKnownGlobal* deferred;
2027 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
2028
2029 Label done, false_result;
2030 Register object = ToRegister(instr->InputAt(0));
2031 Register temp = ToRegister(instr->TempAt(0));
2032 Register result = ToRegister(instr->result());
2033
2034 ASSERT(object.is(r0));
2035 ASSERT(result.is(r0));
2036
2037 // A Smi is not instance of anything.
2038 __ JumpIfSmi(object, &false_result);
2039
2040 // This is the inlined call site instanceof cache. The two occurences of the
2041 // hole value will be patched to the last map/result pair generated by the
2042 // instanceof stub.
2043 Label cache_miss;
2044 Register map = temp;
2045 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2046 __ bind(deferred->map_check()); // Label for calculating code patching.
2047 // We use Factory::the_hole_value() on purpose instead of loading from the
2048 // root array to force relocation to be able to later patch with
2049 // the cached map.
Steve Block44f0eee2011-05-26 01:26:41 +01002050 __ mov(ip, Operand(factory()->the_hole_value()));
Steve Block1e0659c2011-05-24 12:43:12 +01002051 __ cmp(map, Operand(ip));
2052 __ b(ne, &cache_miss);
2053 // We use Factory::the_hole_value() on purpose instead of loading from the
2054 // root array to force relocation to be able to later patch
2055 // with true or false.
Steve Block44f0eee2011-05-26 01:26:41 +01002056 __ mov(result, Operand(factory()->the_hole_value()));
Steve Block1e0659c2011-05-24 12:43:12 +01002057 __ b(&done);
2058
2059 // The inlined call site cache did not match. Check null and string before
2060 // calling the deferred code.
2061 __ bind(&cache_miss);
2062 // Null is not instance of anything.
2063 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2064 __ cmp(object, Operand(ip));
2065 __ b(eq, &false_result);
2066
2067 // String values is not instance of anything.
2068 Condition is_string = masm_->IsObjectStringType(object, temp);
2069 __ b(is_string, &false_result);
2070
2071 // Go to the deferred code.
2072 __ b(deferred->entry());
2073
2074 __ bind(&false_result);
2075 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2076
2077 // Here result has either true or false. Deferred code also produces true or
2078 // false object.
2079 __ bind(deferred->exit());
2080 __ bind(&done);
2081}
2082
2083
// Deferred slow path for DoInstanceOfKnownGlobal: calls the InstanceofStub
// with the call-site-inline-check flags so the stub can patch the inlined
// cache. The distance from |map_check| to the current position is stored
// in r4's safepoint slot so the stub can locate the patch site. The
// boolean result is written back into the result register's safepoint
// slot before registers are restored.
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  // The stub expects its arguments in registers, performs the inline
  // call-site check, and returns a true/false object.
  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be r4 as
  // its slot of the pushing of safepoint registers is used to communicate the
  // offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  // kAdditionalDelta accounts for the instructions emitted between here and
  // the point where the delta is consumed — TODO confirm against the stub.
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  // Keep the constant pool from splitting the fixed-length sequence below.
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}
2121
Ben Murdochb0fe1622011-05-05 13:52:32 +01002122
2123static Condition ComputeCompareCondition(Token::Value op) {
2124 switch (op) {
2125 case Token::EQ_STRICT:
2126 case Token::EQ:
2127 return eq;
2128 case Token::LT:
2129 return lt;
2130 case Token::GT:
2131 return gt;
2132 case Token::LTE:
2133 return le;
2134 case Token::GTE:
2135 return ge;
2136 default:
2137 UNREACHABLE();
Steve Block1e0659c2011-05-24 12:43:12 +01002138 return kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002139 }
2140}
2141
2142
// Emits a generic (tagged) comparison through the CompareIC and
// materializes the boolean result via conditional root loads. The
// condition is reversed for GT/LTE — presumably because the IC computes
// these with swapped operands; confirm against the CompareIC.
void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  // Load true on |condition|, false on its negation.
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}
2161
2162
Ben Murdochb0fe1622011-05-05 13:52:32 +01002163void LCodeGen::DoReturn(LReturn* instr) {
2164 if (FLAG_trace) {
2165 // Push the return value on the stack as the parameter.
2166 // Runtime::TraceExit returns its parameter in r0.
2167 __ push(r0);
2168 __ CallRuntime(Runtime::kTraceExit, 1);
2169 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002170 int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002171 __ mov(sp, fp);
2172 __ ldm(ia_w, sp, fp.bit() | lr.bit());
2173 __ add(sp, sp, Operand(sp_delta));
2174 __ Jump(lr);
2175}
2176
2177
Ben Murdoch8b112d22011-06-08 16:22:53 +01002178void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002179 Register result = ToRegister(instr->result());
2180 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
2181 __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
2182 if (instr->hydrogen()->check_hole_value()) {
2183 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2184 __ cmp(result, ip);
2185 DeoptimizeIf(eq, instr->environment());
2186 }
2187}
2188
2189
Ben Murdoch8b112d22011-06-08 16:22:53 +01002190void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2191 ASSERT(ToRegister(instr->global_object()).is(r0));
2192 ASSERT(ToRegister(instr->result()).is(r0));
2193
2194 __ mov(r2, Operand(instr->name()));
2195 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2196 : RelocInfo::CODE_TARGET_CONTEXT;
2197 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2198 CallCode(ic, mode, instr);
2199}
2200
2201
// Stores a value directly into a global property cell, deoptimizing first
// if the cell holds the hole (the property was deleted) and a hole check
// is required.
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
2225
2226
Ben Murdoch8b112d22011-06-08 16:22:53 +01002227void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2228 ASSERT(ToRegister(instr->global_object()).is(r1));
2229 ASSERT(ToRegister(instr->value()).is(r0));
2230
2231 __ mov(r2, Operand(instr->name()));
2232 Handle<Code> ic = instr->strict_mode()
2233 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2234 : isolate()->builtins()->StoreIC_Initialize();
2235 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2236}
2237
2238
Ben Murdochb8e0da22011-05-16 14:20:40 +01002239void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002240 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002241 Register result = ToRegister(instr->result());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002242 __ ldr(result, ContextOperand(context, instr->slot_index()));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002243}
2244
2245
Steve Block1e0659c2011-05-24 12:43:12 +01002246void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2247 Register context = ToRegister(instr->context());
2248 Register value = ToRegister(instr->value());
Steve Block1e0659c2011-05-24 12:43:12 +01002249 __ str(value, ContextOperand(context, instr->slot_index()));
2250 if (instr->needs_write_barrier()) {
2251 int offset = Context::SlotOffset(instr->slot_index());
2252 __ RecordWrite(context, Operand(offset), value, scratch0());
2253 }
2254}
2255
2256
Ben Murdochb0fe1622011-05-05 13:52:32 +01002257void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002258 Register object = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01002259 Register result = ToRegister(instr->result());
2260 if (instr->hydrogen()->is_in_object()) {
2261 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
2262 } else {
2263 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2264 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
2265 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002266}
2267
2268
// Emits a monomorphic named load for an object whose map is statically
// known to be |type|.  The map's descriptor array tells us whether |name|
// is a real field (in-object or in the properties array) or a constant
// function, which can be materialized directly without touching the object.
// |result| and |object| may alias.
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  // Callers only use this for maps where the lookup is known to succeed
  // with one of these two property types.
  ASSERT(lookup.IsProperty() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
      __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    // Constant function: load the known function object itself.
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    LoadHeapObject(result, Handle<HeapObject>::cast(function));
  }
}
2294
2295
// Polymorphic named load: compares the receiver's map against each map seen
// at the type-feedback site and emits a fast monomorphic load per map.
// Depending on need_generic(), a miss on all maps either falls back to the
// generic LoadIC or deoptimizes.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps recorded: go straight to the generic IC.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    // Load the receiver's map once; each case below compares against it.
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    // The last map is handled out of the loop so the miss path can be
    // either the generic IC or a deopt.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      // All maps covered: any other map means the type feedback was
      // incomplete, so bail out to the unoptimized code.
      DeoptimizeIf(ne, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
2337
2338
Ben Murdochb0fe1622011-05-05 13:52:32 +01002339void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2340 ASSERT(ToRegister(instr->object()).is(r0));
2341 ASSERT(ToRegister(instr->result()).is(r0));
2342
2343 // Name is always in r2.
2344 __ mov(r2, Operand(instr->name()));
Steve Block44f0eee2011-05-26 01:26:41 +01002345 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002346 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2347}
2348
2349
// Loads the prototype of a JSFunction, deoptimizing if the input is not a
// function or its prototype is not yet materialized (the-hole).  Handles
// both the direct-prototype case and the initial-map indirection, plus the
// non-instance-prototype case where the prototype is the map's constructor.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.  The-hole
  // means neither exists yet, so bail out to the unoptimized code.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done: result
  // already holds the prototype.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2392
2393
// Loads the elements backing store of a JSObject.  In debug-code builds it
// additionally verifies that the backing store is a FixedArray, a COW
// FixedArray, or that the holder's elements kind is fast or external.
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, fail;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    // |scratch| holds the map of the elements backing store (loaded from
    // |result| above).  NOTE(review): the elements-kind bits below are read
    // from that map, not from |input|'s map — confirm this is intended.
    __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
    __ ubfx(scratch, scratch, Map::kElementsKindShift,
            Map::kElementsKindBitCount);
    __ cmp(scratch, Operand(JSObject::FAST_ELEMENTS));
    __ b(eq, &done);
    // Accept any elements kind in the external-array range.
    __ cmp(scratch, Operand(JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ b(lt, &fail);
    __ cmp(scratch, Operand(JSObject::LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ b(le, &done);
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed.");
    __ bind(&done);
  }
}
2424
2425
Steve Block44f0eee2011-05-26 01:26:41 +01002426void LCodeGen::DoLoadExternalArrayPointer(
2427 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002428 Register to_reg = ToRegister(instr->result());
2429 Register from_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002430 __ ldr(to_reg, FieldMemOperand(from_reg,
2431 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002432}
2433
2434
// Loads argument |index| from the arguments frame described by
// |arguments|/|length|, deoptimizing when the index is out of range.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bailout if index is not a valid argument index. The unsigned "ls"
  // check covers both index >= length and negative index for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // The subtraction above already accounted for one of them; add one more
  // so (length - index + 1) scaled by the pointer size addresses the slot.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
2451
2452
// Loads an element from a fast-elements FixedArray.  When the hydrogen
// instruction requires it, a load of the-hole triggers a deopt so the
// unoptimized code can handle the missing element.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result: elements + key * kPointerSize + header.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ cmp(result, scratch);
    DeoptimizeIf(eq, instr->environment());
  }
}
2470
2471
// Loads a double element from a FixedDoubleArray into a VFP register.
// Note: this clobbers |elements| by turning it into the address of the
// element itself before the vldr.
void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  Register elements = ToRegister(instr->elements());
  bool key_is_constant = instr->key()->IsConstantOperand();
  Register key = no_reg;
  DwVfpRegister result = ToDoubleRegister(instr->result());
  Register scratch = scratch0();

  int shift_size =
      ElementsKindToShiftSize(JSObject::FAST_DOUBLE_ELEMENTS);
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Offsets that large cannot be encoded; give up on this code object.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }

  // Compute the untagged address of the element in |elements|.
  Operand operand = key_is_constant
      ? Operand(constant_key * (1 << shift_size) +
                FixedDoubleArray::kHeaderSize - kHeapObjectTag)
      : Operand(key, LSL, shift_size);
  __ add(elements, elements, operand);
  if (!key_is_constant) {
    __ add(elements, elements,
           Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  }

  if (instr->hydrogen()->RequiresHoleCheck()) {
    // TODO(danno): If no hole check is required, there is no need to allocate
    // elements into a temporary register, instead scratch can be used.
    // The hole is a NaN with a distinctive upper word: load the word at
    // offset sizeof(kHoleNanLower32) (the high half of the double) and
    // compare it against kHoleNanUpper32.
    __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
    __ cmp(scratch, Operand(kHoleNanUpper32));
    DeoptimizeIf(eq, instr->environment());
  }

  __ vldr(result, elements, 0);
}
2512
2513
// Loads an element from an external (typed) array.  Float/double kinds go
// through VFP registers (floats are widened to double); integer kinds use
// the appropriately sized/signed ARM load.  Unsigned int32 values that do
// not fit in a signed 32-bit result trigger a deopt.
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  JSObject::ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Offsets that large cannot be encoded; give up on this code object.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  // log2 of the element size, used to scale the key.
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result = ToDoubleRegister(instr->result());
    // vldr has no reg+shifted-reg addressing mode, so materialize the
    // element address in scratch0() first.
    Operand operand = key_is_constant
        ? Operand(constant_key * (1 << shift_size))
        : Operand(key, LSL, shift_size);
    __ add(scratch0(), external_pointer, operand);
    if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
      __ vldr(result.low(), scratch0(), 0);
      __ vcvt_f64_f32(result, result.low());
    } else {  // i.e. elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS
      __ vldr(result, scratch0(), 0);
    }
  } else {
    Register result = ToRegister(instr->result());
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (elements_kind) {
      case JSObject::EXTERNAL_BYTE_ELEMENTS:
        __ ldrsb(result, mem_operand);
        break;
      case JSObject::EXTERNAL_PIXEL_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ ldrb(result, mem_operand);
        break;
      case JSObject::EXTERNAL_SHORT_ELEMENTS:
        __ ldrsh(result, mem_operand);
        break;
      case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ ldrh(result, mem_operand);
        break;
      case JSObject::EXTERNAL_INT_ELEMENTS:
        __ ldr(result, mem_operand);
        break;
      case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ ldr(result, mem_operand);
        // Values >= 2^31 are not representable as a signed int32 result.
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case JSObject::EXTERNAL_FLOAT_ELEMENTS:
      case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
      case JSObject::FAST_DOUBLE_ELEMENTS:
      case JSObject::FAST_ELEMENTS:
      case JSObject::DICTIONARY_ELEMENTS:
      case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
2586
2587
Ben Murdochb0fe1622011-05-05 13:52:32 +01002588void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2589 ASSERT(ToRegister(instr->object()).is(r1));
2590 ASSERT(ToRegister(instr->key()).is(r0));
2591
Steve Block44f0eee2011-05-26 01:26:41 +01002592 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002593 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2594}
2595
2596
2597void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002598 Register scratch = scratch0();
2599 Register result = ToRegister(instr->result());
2600
2601 // Check if the calling frame is an arguments adaptor frame.
2602 Label done, adapted;
2603 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2604 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
2605 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2606
2607 // Result is the frame pointer for the frame if not adapted and for the real
2608 // frame below the adaptor frame if adapted.
2609 __ mov(result, fp, LeaveCC, ne);
2610 __ mov(result, scratch, LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002611}
2612
2613
// Computes the number of arguments passed to this function.  The input is
// the frame base produced by DoArgumentsElements: if it equals fp there was
// no adaption and the count is the static parameter count; otherwise the
// count is read from the arguments adaptor frame.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  // The stored length is a smi; convert it to an integer.
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}
2634
2635
// Implements Function.prototype.apply with an arguments object: fixes up
// the receiver per the callee's mode (strict/native/normal), copies the
// arguments from the arguments frame onto the stack, then invokes the
// function.  Deoptimizes for invalid receivers and for argument counts
// above kArgumentsLimit.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions.  The hint bits live in the (smi-tagged) compiler-hints
  // word, hence the kSmiTagSize shift.
  __ ldr(scratch,
         FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch,
         FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
  __ tst(scratch,
         Operand(1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Do not transform the receiver to object for builtins.
  __ tst(scratch, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Normal function. Replace undefined or null with global receiver.
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object.
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
  DeoptimizeIf(lt, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ ldr(receiver,
         FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.  Arguments are pushed from the last index down to 1.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2727
2728
2729void LCodeGen::DoPushArgument(LPushArgument* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002730 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002731 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2732 Abort("DoPushArgument not implemented for double type.");
2733 } else {
2734 Register argument_reg = EmitLoadRegister(argument, ip);
2735 __ push(argument_reg);
2736 }
2737}
2738
2739
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002740void LCodeGen::DoThisFunction(LThisFunction* instr) {
2741 Register result = ToRegister(instr->result());
2742 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2743}
2744
2745
Steve Block1e0659c2011-05-24 12:43:12 +01002746void LCodeGen::DoContext(LContext* instr) {
2747 Register result = ToRegister(instr->result());
2748 __ mov(result, cp);
2749}
2750
2751
2752void LCodeGen::DoOuterContext(LOuterContext* instr) {
2753 Register context = ToRegister(instr->context());
2754 Register result = ToRegister(instr->result());
2755 __ ldr(result,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002756 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block1e0659c2011-05-24 12:43:12 +01002757}
2758
2759
Ben Murdochb0fe1622011-05-05 13:52:32 +01002760void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2761 Register result = ToRegister(instr->result());
2762 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2763}
2764
2765
2766void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002767 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002768 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002769 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002770}
2771
2772
// Emits a direct call to a statically known JSFunction, bypassing the
// generic call machinery.  NOTE(review): callers must have placed the
// function object in r1 beforehand — the context and code-entry loads
// below read it from there; confirm against call sites.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to arguments count if adaption is not needed. Assumes that r0
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function: jump straight to the function's code entry.
  __ SetCallKind(r5, call_kind);
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2806
2807
2808void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002809 ASSERT(ToRegister(instr->result()).is(r0));
2810 __ mov(r1, Operand(instr->function()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002811 CallKnownFunction(instr->function(),
2812 instr->arity(),
2813 instr,
2814 CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002815}
2816
2817
// Deferred (slow) path of Math.abs for a tagged, non-smi input.  Verifies
// the input is a heap number, returns it unchanged when non-negative, and
// otherwise allocates a fresh heap number holding the same value with the
// IEEE 754 sign bit cleared.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  // Reuse scratch0() under a new name; the old alias is killed so the
  // exponent value cannot be clobbered by accident below.
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it.  The sign bit lives in the exponent word of the double.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  // Move the input to the result if necessary.
  __ Move(result, input);
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    // Clear the sign bit and copy both words into the new heap number.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    // Publish the new number as the result via its safepoint slot.
    __ StoreToSafepointRegisterSlot(tmp1, result);
  }

  __ bind(&done);
}
2882
2883
// Emits integer Math.abs: copies non-negative inputs, negates negative
// ones, and deoptimizes when the negation overflows (abs(INT_MIN)).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ cmp(input, Operand(0));
  // Non-negative (pl): result is simply the input.
  __ Move(result, input, pl);
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(result, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
2896
2897
// Emits Math.abs, dispatching on the value's representation: vabs for
// doubles, EmitIntegerMathAbs for int32, and for tagged values a smi fast
// path with a deferred heap-number slow path.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    DwVfpRegister result = ToDoubleRegister(instr->result());
    __ vabs(result, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.  Non-smis (heap numbers or other objects) take the
    // deferred path.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2931
2932
// Math.floor on an unboxed double, producing an int32 result.
// Deoptimizes when the truncation is inexact/out of range, and optionally
// when the result would be -0 (kBailoutOnMinusZero).
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  // Truncate towards -infinity into the single-precision scratch register.
  __ EmitVFPTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     scratch2);
  // EmitVFPTruncate left the flags indicating whether the conversion was
  // exact; bail out to the runtime if it was not.
  DeoptimizeIf(ne, instr->environment());

  // Move the truncated value into the general-purpose result register.
  __ vmov(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0: an integer result of 0 with a negative input sign bit.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}
2961
2962
// Math.round on an unboxed double, producing an int32 result.
// Implemented as floor(input + 0.5) with special handling for tiny values
// (|x| < 0.5 -> 0), a range check (|x| must be < 2^32), and optional
// -0 bailout (kBailoutOnMinusZero).
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  Label done, check_sign_on_zero;

  // Extract exponent bits from the high word of the double.
  __ vmov(result, input.high());
  __ ubfx(scratch,
          result,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);

  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
  // (exponent <= bias - 2 means |x| < 0.5).
  __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2));
  __ mov(result, Operand(0), LeaveCC, le);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ b(le, &check_sign_on_zero);
  } else {
    __ b(le, &done);
  }

  // The following conversion will not work with numbers
  // outside of ]-2^32, 2^32[.
  __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
  DeoptimizeIf(ge, instr->environment());

  // Save the original sign bit for later comparison.
  __ and_(scratch, result, Operand(HeapNumber::kSignMask));

  // NOTE(review): this vadd overwrites the input register in place —
  // confirm the register allocator treats this input as clobbered.
  __ Vmov(double_scratch0(), 0.5);
  __ vadd(input, input, double_scratch0());

  // Check sign of the result: if the sign changed, the input
  // value was in ]-0.5, 0[ and the result should be -0.
  __ vmov(result, input.high());
  __ eor(result, result, Operand(scratch), SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(mi, instr->environment());
  } else {
    __ mov(result, Operand(0), LeaveCC, mi);
    __ b(mi, &done);
  }

  // Truncate (input + 0.5) towards -infinity; ne means the conversion
  // was inexact or out of range, so deoptimize.
  __ EmitVFPTruncate(kRoundToMinusInf,
                     double_scratch0().low(),
                     input,
                     result,
                     scratch);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0: zero result with a negative input sign bit.
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ bind(&check_sign_on_zero);
    __ vmov(scratch, input.high());
    __ tst(scratch, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ bind(&done);
}
3026
3027
Ben Murdochb0fe1622011-05-05 13:52:32 +01003028void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003029 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003030 DoubleRegister result = ToDoubleRegister(instr->result());
3031 __ vsqrt(result, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003032}
3033
3034
Steve Block44f0eee2011-05-26 01:26:41 +01003035void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3036 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003037 DoubleRegister result = ToDoubleRegister(instr->result());
Steve Block44f0eee2011-05-26 01:26:41 +01003038 // Add +0 to convert -0 to +0.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003039 __ vadd(result, input, kDoubleRegZero);
3040 __ vsqrt(result, result);
Steve Block44f0eee2011-05-26 01:26:41 +01003041}
3042
3043
// Math.pow via C runtime calls, dispatched on the exponent representation:
//  - double exponent: power_double_double,
//  - int32 exponent: power_double_int,
//  - tagged exponent: unbox (smi or heap number, deopt otherwise) then
//    power_double_double.
// The double result is fetched back with GetCFunctionDoubleResult.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  Register scratch = scratch0();
  DoubleRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    // Prepare arguments and call C function: two double arguments.
    __ PrepareCallCFunction(0, 2, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left),
                               ToDoubleRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 0, 2);
  } else if (exponent_type.IsInteger32()) {
    ASSERT(ToRegister(right).is(r0));
    // Prepare arguments and call C function: one double, one integer.
    __ PrepareCallCFunction(1, 1, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 1, 1);
  } else {
    ASSERT(exponent_type.IsTagged());
    ASSERT(instr->hydrogen()->left()->representation().IsDouble());

    Register right_reg = ToRegister(right);

    // Check for smi on the right hand side.
    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);

    // Untag smi and convert it to a double in result_reg.
    __ SmiUntag(right_reg);
    SwVfpRegister single_scratch = double_scratch0().low();
    __ vmov(single_scratch, right_reg);
    __ vcvt_f64_s32(result_reg, single_scratch);
    __ jmp(&call);

    // Heap number map check; deoptimize for any other tagged value.
    __ bind(&non_smi);
    __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(scratch, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    // Load the heap number's unboxed double payload into result_reg.
    int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
    __ add(scratch, right_reg, Operand(value_offset));
    __ vldr(result_reg, scratch, 0);

    // Prepare arguments and call C function: two double arguments.
    __ bind(&call);
    __ PrepareCallCFunction(0, 2, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 0, 2);
  }
  // Store the result in the result register.
  __ GetCFunctionDoubleResult(result_reg);
}
3101
3102
3103void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3104 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3105 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3106 TranscendentalCacheStub::UNTAGGED);
3107 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3108}
3109
3110
3111void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3112 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3113 TranscendentalCacheStub stub(TranscendentalCache::COS,
3114 TranscendentalCacheStub::UNTAGGED);
3115 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3116}
3117
3118
3119void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3120 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3121 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3122 TranscendentalCacheStub::UNTAGGED);
3123 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3124}
3125
3126
Ben Murdochb0fe1622011-05-05 13:52:32 +01003127void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3128 switch (instr->op()) {
3129 case kMathAbs:
3130 DoMathAbs(instr);
3131 break;
3132 case kMathFloor:
3133 DoMathFloor(instr);
3134 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003135 case kMathRound:
3136 DoMathRound(instr);
3137 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003138 case kMathSqrt:
3139 DoMathSqrt(instr);
3140 break;
Steve Block44f0eee2011-05-26 01:26:41 +01003141 case kMathPowHalf:
3142 DoMathPowHalf(instr);
3143 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003144 case kMathCos:
3145 DoMathCos(instr);
3146 break;
3147 case kMathSin:
3148 DoMathSin(instr);
3149 break;
3150 case kMathLog:
3151 DoMathLog(instr);
3152 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003153 default:
3154 Abort("Unimplemented type of LUnaryMathOperation.");
3155 UNREACHABLE();
3156 }
3157}
3158
3159
Ben Murdoch257744e2011-11-30 15:57:28 +00003160void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3161 ASSERT(ToRegister(instr->function()).is(r1));
3162 ASSERT(instr->HasPointerMap());
3163 ASSERT(instr->HasDeoptimizationEnvironment());
3164 LPointerMap* pointers = instr->pointer_map();
3165 LEnvironment* env = instr->deoptimization_environment();
3166 RecordPosition(pointers->position());
3167 RegisterEnvironmentForDeoptimization(env);
3168 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3169 ParameterCount count(instr->arity());
3170 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3171 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3172}
3173
3174
Ben Murdochb0fe1622011-05-05 13:52:32 +01003175void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003176 ASSERT(ToRegister(instr->result()).is(r0));
3177
3178 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003179 Handle<Code> ic =
3180 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003181 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3182 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003183}
3184
3185
3186void LCodeGen::DoCallNamed(LCallNamed* instr) {
3187 ASSERT(ToRegister(instr->result()).is(r0));
3188
3189 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003190 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3191 Handle<Code> ic =
3192 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003193 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003194 CallCode(ic, mode, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003195 // Restore context register.
3196 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3197}
3198
3199
3200void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003201 ASSERT(ToRegister(instr->result()).is(r0));
3202
3203 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003204 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
Steve Block9fac8402011-05-12 15:51:54 +01003205 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3206 __ Drop(1);
3207 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003208}
3209
3210
3211void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003212 ASSERT(ToRegister(instr->result()).is(r0));
3213
3214 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003215 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
Steve Block44f0eee2011-05-26 01:26:41 +01003216 Handle<Code> ic =
Ben Murdoch257744e2011-11-30 15:57:28 +00003217 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003218 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003219 CallCode(ic, mode, instr);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003220 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003221}
3222
3223
// Direct call to a statically-known global function; the target function
// is materialized in r1 and the result arrives in r0.
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->target()));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
3229
3230
3231void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003232 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003233 ASSERT(ToRegister(instr->result()).is(r0));
3234
Steve Block44f0eee2011-05-26 01:26:41 +01003235 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003236 __ mov(r0, Operand(instr->arity()));
3237 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
3238}
3239
3240
// Calls into the C++ runtime; the runtime function and argument count
// come directly from the Lithium instruction.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3244
3245
// Stores a value into a named field at a known offset, either in-object
// or in the out-of-line properties array, optionally writing a map
// transition first and emitting a GC write barrier when required.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  // If this store transitions the object's map, install the new map first.
  if (!instr->transition().is_null()) {
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    // Out-of-object property: store through the properties array.
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}
3276
3277
3278void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3279 ASSERT(ToRegister(instr->object()).is(r1));
3280 ASSERT(ToRegister(instr->value()).is(r0));
3281
3282 // Name is always in r2.
3283 __ mov(r2, Operand(instr->name()));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003284 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003285 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3286 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003287 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3288}
3289
3290
// Deoptimizes unless index < length. The hs (unsigned >=) condition also
// rejects negative indices, which compare as large unsigned values.
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
  DeoptimizeIf(hs, instr->environment());
}
3295
3296
// Stores a tagged value into a FixedArray elements backing store, with a
// constant-key fast path and an optional GC write barrier.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // Constant keys never need a barrier here (asserted below), so the
    // element address computed in scratch is not required afterwards.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    // Note this clobbers the key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
3321
3322
// Stores an unboxed double into a FixedDoubleArray backing store,
// canonicalizing any NaN value before it is written so the backing store
// never contains signaling/arbitrary NaN bit patterns.
void LCodeGen::DoStoreKeyedFastDoubleElement(
    LStoreKeyedFastDoubleElement* instr) {
  DwVfpRegister value = ToDoubleRegister(instr->value());
  Register elements = ToRegister(instr->elements());
  Register key = no_reg;
  Register scratch = scratch0();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  // NOTE(review): not_nan is bound below but never branched to — the NaN
  // handling uses a conditional Vmov instead; the label looks vestigial.
  Label not_nan;

  // Calculate the effective address of the slot in the array to store the
  // double value.
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ElementsKindToShiftSize(JSObject::FAST_DOUBLE_ELEMENTS);
  Operand operand = key_is_constant
      ? Operand(constant_key * (1 << shift_size) +
                FixedDoubleArray::kHeaderSize - kHeapObjectTag)
      : Operand(key, LSL, shift_size);
  __ add(scratch, elements, operand);
  if (!key_is_constant) {
    __ add(scratch, scratch,
           Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  }

  // Check for NaN. All NaNs must be canonicalized.
  // Comparing a NaN against itself sets the VFP "unordered" (vs) state.
  __ VFPCompareAndSetFlags(value, value);

  // Only load canonical NaN if the comparison above set the overflow.
  __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);

  __ bind(&not_nan);
  __ vstr(value, scratch, 0);
}
3363
3364
// Stores into an external (typed) array backing store. Float/double kinds
// go through VFP registers (floats are narrowed from f64 to f32); integer
// kinds use a byte/halfword/word store sized by the elements kind.
void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {

  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  JSObject::ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Reject constants whose scaled offset could overflow the addressing.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  // log2 of the element size, used to scale the key into a byte offset.
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister value(ToDoubleRegister(instr->value()));
    Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
                                    : Operand(key, LSL, shift_size));
    __ add(scratch0(), external_pointer, operand);
    if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
      // Narrow the double to single precision before storing.
      __ vcvt_f32_f64(double_scratch0().low(), value);
      __ vstr(double_scratch0().low(), scratch0(), 0);
    } else {  // i.e. elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS
      __ vstr(value, scratch0(), 0);
    }
  } else {
    Register value(ToRegister(instr->value()));
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (elements_kind) {
      case JSObject::EXTERNAL_PIXEL_ELEMENTS:
      case JSObject::EXTERNAL_BYTE_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ strb(value, mem_operand);
        break;
      case JSObject::EXTERNAL_SHORT_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ strh(value, mem_operand);
        break;
      case JSObject::EXTERNAL_INT_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ str(value, mem_operand);
        break;
      // Non-integer and non-external kinds are handled elsewhere.
      case JSObject::EXTERNAL_FLOAT_ELEMENTS:
      case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
      case JSObject::FAST_DOUBLE_ELEMENTS:
      case JSObject::FAST_ELEMENTS:
      case JSObject::DICTIONARY_ELEMENTS:
      case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
3426
3427
Ben Murdochb0fe1622011-05-05 13:52:32 +01003428void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3429 ASSERT(ToRegister(instr->object()).is(r2));
3430 ASSERT(ToRegister(instr->key()).is(r1));
3431 ASSERT(ToRegister(instr->value()).is(r0));
3432
Ben Murdoch8b112d22011-06-08 16:22:53 +01003433 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003434 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3435 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003436 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3437}
3438
3439
Ben Murdoch257744e2011-11-30 15:57:28 +00003440void LCodeGen::DoStringAdd(LStringAdd* instr) {
3441 __ push(ToRegister(instr->left()));
3442 __ push(ToRegister(instr->right()));
3443 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3444 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3445}
3446
3447
// Loads the character code at a given index of a string. Unwraps sliced
// and flat cons strings inline (clobbering the string and index registers
// in the process), handles sequential ASCII and two-byte encodings
// directly, and defers every other shape to the runtime.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register string = ToRegister(instr->string());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ tst(result, Operand(kIsIndirectStringMask));
  __ b(eq, &check_sequential);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ tst(result, Operand(kSlicedNotConsMask));
  __ b(eq, &cons_string);

  // Handle slices: add the slice offset to the index and continue with
  // the parent string. Note this mutates the index and string registers.
  Label indirect_string_loaded;
  __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ add(index, index, Operand(result, ASR, kSmiTagSize));
  __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded);

  // Handle conses.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(result, ip);
  __ b(ne, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));

  // Reload the instance type of the unwrapped string.
  __ bind(&indirect_string_loaded);
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // Check whether the string is sequential. The only non-sequential
  // shapes we support have just been unwrapped above.
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, deferred->entry());

  // Dispatch on the encoding: ASCII or two-byte.
  Label ascii_string;
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii_string);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  Label done;
  __ add(result,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ ldrh(result, MemOperand(result, index, LSL, 1));
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  __ add(result,
         string,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ ldrb(result, MemOperand(result, index));

  __ bind(&done);
  __ bind(deferred->exit());
}
3536
3537
// Deferred (slow) path for DoStringCharCodeAt: calls the
// Runtime::kStringCharCodeAt runtime function with the string and the
// smi-tagged index, then untags the smi result and stores it into the
// result's safepoint register slot.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  __ SmiUntag(r0);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3568
3569
// Converts a character code to a one-character string via the
// single-character string cache. Codes above kMaxAsciiCharCode, and cache
// misses (undefined cache entries), fall through to the deferred runtime
// path.
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  // Only ASCII codes are cached; larger codes go to the runtime.
  __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
  __ b(hi, deferred->entry());
  // Probe the single-character string cache.
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
  // An undefined entry means a cache miss: defer to the runtime.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(result, ip);
  __ b(eq, deferred->entry());
  __ bind(deferred->exit());
}
3598
3599
// Deferred (slow) path for DoStringCharFromCode: calls the
// Runtime::kCharFromCode runtime function with the smi-tagged character
// code and stores the resulting string into the result's safepoint slot.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3615
3616
// Loads the (smi-encoded) length field of a string object into the result
// register.
void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
}
3622
3623
// Converts a signed 32-bit integer (in a register or stack slot) to a
// double, using the low half of the VFP double scratch register as a
// staging single-precision register for the vcvt.
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  ASSERT(output->IsDoubleRegister());
  SwVfpRegister single_scratch = double_scratch0().low();
  if (input->IsStackSlot()) {
    Register scratch = scratch0();
    // Spilled input: load through a core scratch register first.
    __ ldr(scratch, ToMemOperand(input));
    __ vmov(single_scratch, scratch);
  } else {
    __ vmov(single_scratch, ToRegister(input));
  }
  // Signed int32 -> float64 conversion.
  __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
}
3639
3640
3641void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3642 class DeferredNumberTagI: public LDeferredCode {
3643 public:
3644 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3645 : LDeferredCode(codegen), instr_(instr) { }
3646 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3647 private:
3648 LNumberTagI* instr_;
3649 };
3650
Steve Block1e0659c2011-05-24 12:43:12 +01003651 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003652 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3653 Register reg = ToRegister(input);
3654
3655 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
3656 __ SmiTag(reg, SetCC);
3657 __ b(vs, deferred->entry());
3658 __ bind(deferred->exit());
3659}
3660
3661
3662void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3663 Label slow;
Steve Block1e0659c2011-05-24 12:43:12 +01003664 Register reg = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003665 DoubleRegister dbl_scratch = double_scratch0();
3666 SwVfpRegister flt_scratch = dbl_scratch.low();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003667
3668 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01003669 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003670
3671 // There was overflow, so bits 30 and 31 of the original integer
3672 // disagree. Try to allocate a heap number in new space and store
3673 // the value in there. If that fails, call the runtime system.
3674 Label done;
3675 __ SmiUntag(reg);
3676 __ eor(reg, reg, Operand(0x80000000));
3677 __ vmov(flt_scratch, reg);
3678 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
3679 if (FLAG_inline_new) {
3680 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3681 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3682 if (!reg.is(r5)) __ mov(reg, r5);
3683 __ b(&done);
3684 }
3685
3686 // Slow case: Call the runtime system to do the number allocation.
3687 __ bind(&slow);
3688
3689 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3690 // register is stored, as this register is in the pointer map, but contains an
3691 // integer value.
3692 __ mov(ip, Operand(0));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003693 __ StoreToSafepointRegisterSlot(ip, reg);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003694 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003695 if (!reg.is(r0)) __ mov(reg, r0);
3696
3697 // Done. Put the value in dbl_scratch into the value of the allocated heap
3698 // number.
3699 __ bind(&done);
3700 __ sub(ip, reg, Operand(kHeapObjectTag));
3701 __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003702 __ StoreToSafepointRegisterSlot(reg, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003703}
3704
3705
3706void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3707 class DeferredNumberTagD: public LDeferredCode {
3708 public:
3709 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3710 : LDeferredCode(codegen), instr_(instr) { }
3711 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3712 private:
3713 LNumberTagD* instr_;
3714 };
3715
Steve Block1e0659c2011-05-24 12:43:12 +01003716 DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01003717 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003718 Register reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003719 Register temp1 = ToRegister(instr->TempAt(0));
3720 Register temp2 = ToRegister(instr->TempAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003721
3722 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3723 if (FLAG_inline_new) {
3724 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
3725 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
3726 } else {
3727 __ jmp(deferred->entry());
3728 }
3729 __ bind(deferred->exit());
3730 __ sub(ip, reg, Operand(kHeapObjectTag));
3731 __ vstr(input_reg, ip, HeapNumber::kValueOffset);
3732}
3733
3734
3735void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3736 // TODO(3095996): Get rid of this. For now, we need to make the
3737 // result register contain a valid pointer because it is already
3738 // contained in the register pointer map.
3739 Register reg = ToRegister(instr->result());
3740 __ mov(reg, Operand(0));
3741
Ben Murdoch8b112d22011-06-08 16:22:53 +01003742 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
3743 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003744 __ StoreToSafepointRegisterSlot(r0, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003745}
3746
3747
3748void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003749 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003750 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3751 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3752 __ SmiTag(ToRegister(input));
3753}
3754
3755
3756void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003757 LOperand* input = instr->InputAt(0);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003758 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3759 if (instr->needs_check()) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003760 STATIC_ASSERT(kHeapObjectTag == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003761 // If the input is a HeapObject, SmiUntag will set the carry flag.
3762 __ SmiUntag(ToRegister(input), SetCC);
3763 DeoptimizeIf(cs, instr->environment());
3764 } else {
3765 __ SmiUntag(ToRegister(input));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003766 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003767}
3768
3769
3770void LCodeGen::EmitNumberUntagD(Register input_reg,
3771 DoubleRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003772 bool deoptimize_on_undefined,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003773 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01003774 Register scratch = scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003775 SwVfpRegister flt_scratch = double_scratch0().low();
3776 ASSERT(!result_reg.is(double_scratch0()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003777
3778 Label load_smi, heap_number, done;
3779
3780 // Smi check.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003781 __ JumpIfSmi(input_reg, &load_smi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003782
3783 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01003784 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003785 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01003786 __ cmp(scratch, Operand(ip));
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003787 if (deoptimize_on_undefined) {
3788 DeoptimizeIf(ne, env);
3789 } else {
3790 Label heap_number;
3791 __ b(eq, &heap_number);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003792
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003793 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3794 __ cmp(input_reg, Operand(ip));
3795 DeoptimizeIf(ne, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003796
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003797 // Convert undefined to NaN.
3798 __ LoadRoot(ip, Heap::kNanValueRootIndex);
3799 __ sub(ip, ip, Operand(kHeapObjectTag));
3800 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3801 __ jmp(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003802
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003803 __ bind(&heap_number);
3804 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003805 // Heap number to double register conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003806 __ sub(ip, input_reg, Operand(kHeapObjectTag));
3807 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3808 __ jmp(&done);
3809
3810 // Smi to double register conversion
3811 __ bind(&load_smi);
3812 __ SmiUntag(input_reg); // Untag smi before converting to float.
3813 __ vmov(flt_scratch, input_reg);
3814 __ vcvt_f64_s32(result_reg, flt_scratch);
3815 __ SmiTag(input_reg); // Retag smi.
3816 __ bind(&done);
3817}
3818
3819
// Deferred-code adapter that routes the slow path of LTaggedToI to
// LCodeGen::DoDeferredTaggedToI.
class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};
3828
3829
3830void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003831 Register input_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003832 Register scratch1 = scratch0();
3833 Register scratch2 = ToRegister(instr->TempAt(0));
3834 DwVfpRegister double_scratch = double_scratch0();
3835 SwVfpRegister single_scratch = double_scratch.low();
3836
3837 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
3838 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
3839
3840 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003841
Ben Murdoch257744e2011-11-30 15:57:28 +00003842 // The input was optimistically untagged; revert it.
3843 // The carry flag is set when we reach this deferred code as we just executed
3844 // SmiUntag(heap_object, SetCC)
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003845 STATIC_ASSERT(kHeapObjectTag == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00003846 __ adc(input_reg, input_reg, Operand(input_reg));
3847
Ben Murdochb0fe1622011-05-05 13:52:32 +01003848 // Heap number map check.
Steve Block44f0eee2011-05-26 01:26:41 +01003849 __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003850 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block44f0eee2011-05-26 01:26:41 +01003851 __ cmp(scratch1, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003852
3853 if (instr->truncating()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003854 Register scratch3 = ToRegister(instr->TempAt(1));
3855 DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
3856 ASSERT(!scratch3.is(input_reg) &&
3857 !scratch3.is(scratch1) &&
3858 !scratch3.is(scratch2));
3859 // Performs a truncating conversion of a floating point number as used by
3860 // the JS bitwise operations.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003861 Label heap_number;
3862 __ b(eq, &heap_number);
3863 // Check for undefined. Undefined is converted to zero for truncating
3864 // conversions.
3865 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3866 __ cmp(input_reg, Operand(ip));
3867 DeoptimizeIf(ne, instr->environment());
3868 __ mov(input_reg, Operand(0));
3869 __ b(&done);
3870
3871 __ bind(&heap_number);
Steve Block44f0eee2011-05-26 01:26:41 +01003872 __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
3873 __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);
3874
3875 __ EmitECMATruncate(input_reg,
3876 double_scratch2,
3877 single_scratch,
3878 scratch1,
3879 scratch2,
3880 scratch3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003881
3882 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003883 CpuFeatures::Scope scope(VFP3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003884 // Deoptimize if we don't have a heap number.
3885 DeoptimizeIf(ne, instr->environment());
3886
3887 __ sub(ip, input_reg, Operand(kHeapObjectTag));
Steve Block44f0eee2011-05-26 01:26:41 +01003888 __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
3889 __ EmitVFPTruncate(kRoundToZero,
3890 single_scratch,
3891 double_scratch,
3892 scratch1,
3893 scratch2,
3894 kCheckForInexactConversion);
3895 DeoptimizeIf(ne, instr->environment());
3896 // Load the result.
3897 __ vmov(input_reg, single_scratch);
3898
Ben Murdochb0fe1622011-05-05 13:52:32 +01003899 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +01003900 __ cmp(input_reg, Operand(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003901 __ b(ne, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01003902 __ vmov(scratch1, double_scratch.high());
3903 __ tst(scratch1, Operand(HeapNumber::kSignMask));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003904 DeoptimizeIf(ne, instr->environment());
3905 }
3906 }
3907 __ bind(&done);
3908}
3909
3910
3911void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003912 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003913 ASSERT(input->IsRegister());
3914 ASSERT(input->Equals(instr->result()));
3915
3916 Register input_reg = ToRegister(input);
3917
3918 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3919
Ben Murdoch257744e2011-11-30 15:57:28 +00003920 // Optimistically untag the input.
3921 // If the input is a HeapObject, SmiUntag will set the carry flag.
3922 __ SmiUntag(input_reg, SetCC);
3923 // Branch to deferred code if the input was tagged.
3924 // The deferred code will take care of restoring the tag.
3925 __ b(cs, deferred->entry());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003926 __ bind(deferred->exit());
3927}
3928
3929
3930void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003931 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003932 ASSERT(input->IsRegister());
3933 LOperand* result = instr->result();
3934 ASSERT(result->IsDoubleRegister());
3935
3936 Register input_reg = ToRegister(input);
3937 DoubleRegister result_reg = ToDoubleRegister(result);
3938
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003939 EmitNumberUntagD(input_reg, result_reg,
3940 instr->hydrogen()->deoptimize_on_undefined(),
3941 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003942}
3943
3944
3945void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003946 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003947 Register scratch1 = scratch0();
3948 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003949 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003950 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01003951
Steve Block44f0eee2011-05-26 01:26:41 +01003952 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01003953
Steve Block44f0eee2011-05-26 01:26:41 +01003954 if (instr->truncating()) {
3955 Register scratch3 = ToRegister(instr->TempAt(1));
3956 __ EmitECMATruncate(result_reg,
3957 double_input,
3958 single_scratch,
3959 scratch1,
3960 scratch2,
3961 scratch3);
3962 } else {
3963 VFPRoundingMode rounding_mode = kRoundToMinusInf;
3964 __ EmitVFPTruncate(rounding_mode,
3965 single_scratch,
3966 double_input,
3967 scratch1,
3968 scratch2,
3969 kCheckForInexactConversion);
3970 // Deoptimize if we had a vfp invalid exception,
3971 // including inexact operation.
Steve Block1e0659c2011-05-24 12:43:12 +01003972 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003973 // Retrieve the result.
3974 __ vmov(result_reg, single_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01003975 }
Steve Block44f0eee2011-05-26 01:26:41 +01003976 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003977}
3978
3979
3980void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003981 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003982 __ tst(ToRegister(input), Operand(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003983 DeoptimizeIf(ne, instr->environment());
3984}
3985
3986
// Deoptimizes if the input IS a smi (low tag bit clear).
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
}
3992
3993
3994void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003995 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003996 Register scratch = scratch0();
Ben Murdoch086aeea2011-05-13 15:57:08 +01003997
3998 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3999 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004000
Ben Murdoch257744e2011-11-30 15:57:28 +00004001 if (instr->hydrogen()->is_interval_check()) {
4002 InstanceType first;
4003 InstanceType last;
4004 instr->hydrogen()->GetCheckInterval(&first, &last);
4005
4006 __ cmp(scratch, Operand(first));
4007
4008 // If there is only one type in the interval check for equality.
4009 if (first == last) {
4010 DeoptimizeIf(ne, instr->environment());
4011 } else {
4012 DeoptimizeIf(lo, instr->environment());
4013 // Omit check for the last type.
4014 if (last != LAST_TYPE) {
4015 __ cmp(scratch, Operand(last));
4016 DeoptimizeIf(hi, instr->environment());
4017 }
4018 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01004019 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00004020 uint8_t mask;
4021 uint8_t tag;
4022 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
4023
4024 if (IsPowerOf2(mask)) {
4025 ASSERT(tag == 0 || IsPowerOf2(tag));
4026 __ tst(scratch, Operand(mask));
4027 DeoptimizeIf(tag == 0 ? ne : eq, instr->environment());
4028 } else {
4029 __ and_(scratch, scratch, Operand(mask));
4030 __ cmp(scratch, Operand(tag));
4031 DeoptimizeIf(ne, instr->environment());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004032 }
4033 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004034}
4035
4036
4037void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004038 ASSERT(instr->InputAt(0)->IsRegister());
4039 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004040 __ cmp(reg, Operand(instr->hydrogen()->target()));
4041 DeoptimizeIf(ne, instr->environment());
4042}
4043
4044
4045void LCodeGen::DoCheckMap(LCheckMap* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01004046 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01004047 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004048 ASSERT(input->IsRegister());
4049 Register reg = ToRegister(input);
Steve Block9fac8402011-05-12 15:51:54 +01004050 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
4051 __ cmp(scratch, Operand(instr->hydrogen()->map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004052 DeoptimizeIf(ne, instr->environment());
4053}
4054
4055
// Clamps a double to the uint8 range [0, 255] via the macro-assembler
// helper.
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}
4062
4063
// Clamps an int32 to the uint8 range [0, 255].
void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  Register unclamped_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampUint8(result_reg, unclamped_reg);
}
4069
4070
// Clamps a tagged value to the uint8 range: smis are untagged and clamped
// as integers, heap numbers are clamped as doubles, undefined becomes 0,
// and anything else deoptimizes.
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  Register scratch = scratch0();
  Register input_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  Label is_smi, done, heap_number;

  // Both smi and heap number cases are handled.
  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(factory()->heap_number_map()));
  __ b(eq, &heap_number);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, Operand(factory()->undefined_value()));
  DeoptimizeIf(ne, instr->environment());
  __ mov(result_reg, Operand(0));
  __ jmp(&done);

  // Heap number
  __ bind(&heap_number);
  __ vldr(double_scratch0(), FieldMemOperand(input_reg,
                                             HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
  __ jmp(&done);

  // smi
  __ bind(&is_smi);
  __ SmiUntag(result_reg, input_reg);
  __ ClampUint8(result_reg, result_reg);

  __ bind(&done);
}
4107
4108
// Loads a handle-referenced heap object into a register. Objects in new
// space may move, so they are referenced indirectly through a global
// property cell; old-space objects are embedded directly.
void LCodeGen::LoadHeapObject(Register result,
                              Handle<HeapObject> object) {
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand(cell));
    __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(result, Operand(object));
  }
}
4120
4121
4122void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004123 Register temp1 = ToRegister(instr->TempAt(0));
4124 Register temp2 = ToRegister(instr->TempAt(1));
Steve Block9fac8402011-05-12 15:51:54 +01004125
4126 Handle<JSObject> holder = instr->holder();
Ben Murdochb8e0da22011-05-16 14:20:40 +01004127 Handle<JSObject> current_prototype = instr->prototype();
Steve Block9fac8402011-05-12 15:51:54 +01004128
4129 // Load prototype object.
Ben Murdochb8e0da22011-05-16 14:20:40 +01004130 LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01004131
4132 // Check prototype maps up to the holder.
4133 while (!current_prototype.is_identical_to(holder)) {
4134 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
4135 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
4136 DeoptimizeIf(ne, instr->environment());
4137 current_prototype =
4138 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4139 // Load next prototype object.
Ben Murdochb8e0da22011-05-16 14:20:40 +01004140 LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01004141 }
4142
4143 // Check the holder map.
4144 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
4145 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
4146 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004147}
4148
4149
4150void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01004151 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4152 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
4153 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
4154 __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
4155 __ Push(r3, r2, r1);
4156
4157 // Pick the right runtime function or stub to call.
4158 int length = instr->hydrogen()->length();
4159 if (instr->hydrogen()->IsCopyOnWrite()) {
4160 ASSERT(instr->hydrogen()->depth() == 1);
4161 FastCloneShallowArrayStub::Mode mode =
4162 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
4163 FastCloneShallowArrayStub stub(mode, length);
4164 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4165 } else if (instr->hydrogen()->depth() > 1) {
4166 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
4167 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
4168 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
4169 } else {
4170 FastCloneShallowArrayStub::Mode mode =
4171 FastCloneShallowArrayStub::CLONE_ELEMENTS;
4172 FastCloneShallowArrayStub stub(mode, length);
4173 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4174 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004175}
4176
4177
4178void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01004179 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4180 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
4181 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
4182 __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
4183 __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
4184 __ Push(r4, r3, r2, r1);
4185
4186 // Pick the right runtime function to call.
4187 if (instr->hydrogen()->depth() > 1) {
4188 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
4189 } else {
4190 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
4191 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004192}
4193
4194
// Transitions the object in r0 to fast properties via the runtime.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
4200
4201
// Creates a regexp literal. If the literal slot is still undefined, the
// regexp is materialized via the runtime; the materialized regexp is then
// shallow-copied into a fresh JSRegExp object (inline allocation with a
// runtime fallback) so each evaluation gets its own object.
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  // Undefined slot => the regexp has not been materialized yet.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Inline allocation failed: allocate via the runtime, preserving r1.
  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  // Copy the trailing word when the size is an odd number of pointers.
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
4255
4256
4257void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01004258 // Use the fast case closure allocation code that allocates in new
4259 // space for nested functions that don't need literals cloning.
4260 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
Steve Block1e0659c2011-05-24 12:43:12 +01004261 bool pretenure = instr->hydrogen()->pretenure();
Steve Block44f0eee2011-05-26 01:26:41 +01004262 if (!pretenure && shared_info->num_literals() == 0) {
4263 FastNewClosureStub stub(
4264 shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004265 __ mov(r1, Operand(shared_info));
4266 __ push(r1);
4267 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4268 } else {
4269 __ mov(r2, Operand(shared_info));
4270 __ mov(r1, Operand(pretenure
Steve Block44f0eee2011-05-26 01:26:41 +01004271 ? factory()->true_value()
4272 : factory()->false_value()));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004273 __ Push(cp, r2, r1);
4274 CallRuntime(Runtime::kNewClosure, 3, instr);
4275 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004276}
4277
4278
4279void LCodeGen::DoTypeof(LTypeof* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004280 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004281 __ push(input);
4282 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004283}
4284
4285
// Branches on whether typeof(input) equals a literal type name. The
// type-specific comparison sequence is emitted by EmitTypeofIs, which
// returns the condition for the final branch.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}
4300
4301
// Emits the code for a single "typeof x == 'name'" test.  For each
// recognized type name this emits a flag-setting comparison and returns the
// condition under which the test succeeds; for sub-cases that can be decided
// without the final branch it jumps directly to true_label/false_label.
// An unrecognized type name always fails (unconditional jump to
// false_label).  Note: `input` is clobbered in several branches (used to
// hold the map), and `ip`/scratch0() are used as temporaries.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis are numbers; otherwise the object's map must be the heap-number
    // map.
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    // Non-smi with instance type below FIRST_NONSTRING_TYPE, and not
    // undetectable (undetectable strings report "undefined").
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // Only the two oddball values true and false qualify.
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    // Under harmony typeof, typeof null == "null" (instead of "object").
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // Any callable spec object (instance type at or above the first
    // callable type) reports "function".
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch,
                         FIRST_CALLABLE_SPEC_OBJECT_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
      // Classic semantics: typeof null == "object".
      __ CompareRoot(input, Heap::kNullValueRootIndex);
      __ b(eq, true_label);
    }
    // Instance type must lie in the non-callable spec-object range.
    __ CompareObjectType(input, input, scratch,
                         FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, false_label);
    __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    // Unknown type name: the comparison can never succeed.
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
4373
4374
Steve Block1e0659c2011-05-24 12:43:12 +01004375void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4376 Register temp1 = ToRegister(instr->TempAt(0));
4377 int true_block = chunk_->LookupDestination(instr->true_block_id());
4378 int false_block = chunk_->LookupDestination(instr->false_block_id());
4379
4380 EmitIsConstructCall(temp1, scratch0());
4381 EmitBranch(true_block, false_block, eq);
4382}
4383
4384
// Sets the condition flags so that eq holds iff the calling frame is a
// construct frame.  Walks up one frame (skipping an arguments-adaptor frame
// if present) and compares that frame's marker word against
// StackFrame::CONSTRUCT.  Both temp registers are clobbered.
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4402
4403
// Intentionally emits no code.  The instruction exists only so that the
// environment after a preceding call is captured and used to populate the
// safepoint's deoptimization data.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
4408
4409
// Unconditional bailout to the deoptimizer: the "al" (always) condition
// makes DeoptimizeIf emit an unconditional deopt using this instruction's
// environment.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}
4413
4414
// Implements the delete operator on a property via the DELETE builtin.
// Pushes (object, key, strict-mode flag as a smi) and invokes the builtin
// with a safepoint generator so lazy deoptimization after the call is
// possible.  The ordering here matters: the environment must be registered
// for deoptimization before the builtin call records its safepoint.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // The safepoint generator records the safepoint when the builtin's
  // internal call site is reached.
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
4431
4432
// Implements the `in` operator via the IN builtin.  Mirrors the structure
// of DoDeleteProperty: push arguments (note: key before object here),
// register the environment for deoptimization, then invoke the builtin
// with a safepoint generator.
void LCodeGen::DoIn(LIn* instr) {
  Register obj = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(key, obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
4447
4448
// Deferred (out-of-line) part of the stack check: saves the safepoint
// registers, calls Runtime::kStackGuard (which may trigger interrupts/GC),
// and registers lazy deoptimization for the call.  The inner braces scope
// the PushSafepointRegistersScope so registers are restored before the pc
// offset is recorded.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
    __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
    RegisterLazyDeoptimization(
        instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  }

  // The gap code includes the restoring of the safepoint registers.
  int pc = masm()->pc_offset();
  safepoints_.SetPcAfterGap(pc);
}
4461
4462
// Emits a stack-limit check.  At function entry the slow path is a
// StackCheckStub call emitted inline; at a backwards branch the slow path
// is moved out of line as deferred code so the common (no-overflow) case
// falls through without a taken branch.
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Local deferred-code helper: when jumped to, performs the out-of-line
  // stack guard call (see DoDeferredStackCheck).
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LStackCheck* instr_;
  };

  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &done);  // sp >= limit: no overflow, skip the stub call.
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    __ bind(&done);
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(lo, deferred_stack_check->entry());  // sp < limit: take slow path.
    // The deferred code returns to done_label after the guard call.
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
  }
}
4494
4495
// On-stack-replacement entry point.  Emits no machine code; it registers
// the environment (with the spilled-register arrays attached) for
// deoptimization and records the current assembler pc as the OSR offset.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  // Only one OSR entry per compiled function is expected.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
4511
4512
Ben Murdoch257744e2011-11-30 15:57:28 +00004513
4514
Ben Murdochb0fe1622011-05-05 13:52:32 +01004515#undef __
4516
4517} } // namespace v8::internal