// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Steve Block44f0eee2011-05-26 01:26:41 +010028#include "v8.h"
29
Ben Murdochb0fe1622011-05-05 13:52:32 +010030#include "arm/lithium-codegen-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +010031#include "arm/lithium-gap-resolver-arm.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "code-stubs.h"
33#include "stub-cache.h"
34
35namespace v8 {
36namespace internal {
37
38
Steve Block44f0eee2011-05-26 01:26:41 +010039class SafepointGenerator : public CallWrapper {
Ben Murdochb0fe1622011-05-05 13:52:32 +010040 public:
41 SafepointGenerator(LCodeGen* codegen,
42 LPointerMap* pointers,
43 int deoptimization_index)
44 : codegen_(codegen),
45 pointers_(pointers),
46 deoptimization_index_(deoptimization_index) { }
47 virtual ~SafepointGenerator() { }
48
Ben Murdoch257744e2011-11-30 15:57:28 +000049 virtual void BeforeCall(int call_size) const {
Steve Block44f0eee2011-05-26 01:26:41 +010050 ASSERT(call_size >= 0);
51 // Ensure that we have enough space after the previous safepoint position
52 // for the generated code there.
53 int call_end = codegen_->masm()->pc_offset() + call_size;
54 int prev_jump_end =
55 codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
56 if (call_end < prev_jump_end) {
57 int padding_size = prev_jump_end - call_end;
58 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
59 while (padding_size > 0) {
60 codegen_->masm()->nop();
61 padding_size -= Assembler::kInstrSize;
62 }
63 }
64 }
65
Ben Murdoch257744e2011-11-30 15:57:28 +000066 virtual void AfterCall() const {
Ben Murdochb0fe1622011-05-05 13:52:32 +010067 codegen_->RecordSafepoint(pointers_, deoptimization_index_);
68 }
69
70 private:
71 LCodeGen* codegen_;
72 LPointerMap* pointers_;
73 int deoptimization_index_;
74};
75
76
77#define __ masm()->
78
79bool LCodeGen::GenerateCode() {
80 HPhase phase("Code generation", chunk());
81 ASSERT(is_unused());
82 status_ = GENERATING;
83 CpuFeatures::Scope scope1(VFP3);
84 CpuFeatures::Scope scope2(ARMv7);
85 return GeneratePrologue() &&
86 GenerateBody() &&
87 GenerateDeferredCode() &&
Ben Murdoch257744e2011-11-30 15:57:28 +000088 GenerateDeoptJumpTable() &&
Ben Murdochb0fe1622011-05-05 13:52:32 +010089 GenerateSafepointTable();
90}
91
92
93void LCodeGen::FinishCode(Handle<Code> code) {
94 ASSERT(is_done());
Ben Murdoch257744e2011-11-30 15:57:28 +000095 code->set_stack_slots(GetStackSlotCount());
Steve Block1e0659c2011-05-24 12:43:12 +010096 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
Ben Murdochb0fe1622011-05-05 13:52:32 +010097 PopulateDeoptimizationData(code);
Steve Block44f0eee2011-05-26 01:26:41 +010098 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
Ben Murdochb0fe1622011-05-05 13:52:32 +010099}
100
101
102void LCodeGen::Abort(const char* format, ...) {
103 if (FLAG_trace_bailout) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100104 SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
105 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100106 va_list arguments;
107 va_start(arguments, format);
108 OS::VPrint(format, arguments);
109 va_end(arguments);
110 PrintF("\n");
111 }
112 status_ = ABORTED;
113}
114
115
116void LCodeGen::Comment(const char* format, ...) {
117 if (!FLAG_code_comments) return;
118 char buffer[4 * KB];
119 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
120 va_list arguments;
121 va_start(arguments, format);
122 builder.AddFormattedList(format, arguments);
123 va_end(arguments);
124
125 // Copy the string before recording it in the assembler to avoid
126 // issues when the stack allocated buffer goes out of scope.
127 size_t length = builder.position();
128 Vector<char> copy = Vector<char>::New(length + 1);
129 memcpy(copy.start(), builder.Finalize(), copy.length());
130 masm()->RecordComment(copy.start());
131}
132
133
134bool LCodeGen::GeneratePrologue() {
135 ASSERT(is_generating());
136
137#ifdef DEBUG
138 if (strlen(FLAG_stop_at) > 0 &&
139 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
140 __ stop("stop_at");
141 }
142#endif
143
144 // r1: Callee's JS function.
145 // cp: Callee's context.
146 // fp: Caller's frame pointer.
147 // lr: Caller's pc.
148
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000149 // Strict mode functions and builtins need to replace the receiver
150 // with undefined when called as functions (without an explicit
151 // receiver object). r5 is zero for method calls and non-zero for
152 // function calls.
153 if (info_->is_strict_mode() || info_->is_native()) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000154 Label ok;
155 __ cmp(r5, Operand(0));
156 __ b(eq, &ok);
157 int receiver_offset = scope()->num_parameters() * kPointerSize;
158 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
159 __ str(r2, MemOperand(sp, receiver_offset));
160 __ bind(&ok);
161 }
162
Ben Murdochb0fe1622011-05-05 13:52:32 +0100163 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
164 __ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.
165
166 // Reserve space for the stack slots needed by the code.
Ben Murdoch257744e2011-11-30 15:57:28 +0000167 int slots = GetStackSlotCount();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100168 if (slots > 0) {
169 if (FLAG_debug_code) {
170 __ mov(r0, Operand(slots));
171 __ mov(r2, Operand(kSlotsZapValue));
172 Label loop;
173 __ bind(&loop);
174 __ push(r2);
175 __ sub(r0, r0, Operand(1), SetCC);
176 __ b(ne, &loop);
177 } else {
178 __ sub(sp, sp, Operand(slots * kPointerSize));
179 }
180 }
181
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100182 // Possibly allocate a local context.
183 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
184 if (heap_slots > 0) {
185 Comment(";;; Allocate local context");
186 // Argument to NewContext is the function, which is in r1.
187 __ push(r1);
188 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
189 FastNewContextStub stub(heap_slots);
190 __ CallStub(&stub);
191 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000192 __ CallRuntime(Runtime::kNewFunctionContext, 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100193 }
194 RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
195 // Context is returned in both r0 and cp. It replaces the context
196 // passed to us. It's saved in the stack and kept live in cp.
197 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
198 // Copy any necessary parameters into the context.
199 int num_parameters = scope()->num_parameters();
200 for (int i = 0; i < num_parameters; i++) {
201 Slot* slot = scope()->parameter(i)->AsSlot();
202 if (slot != NULL && slot->type() == Slot::CONTEXT) {
203 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
204 (num_parameters - 1 - i) * kPointerSize;
205 // Load parameter from stack.
206 __ ldr(r0, MemOperand(fp, parameter_offset));
207 // Store it in the context.
208 __ mov(r1, Operand(Context::SlotOffset(slot->index())));
209 __ str(r0, MemOperand(cp, r1));
210 // Update the write barrier. This clobbers all involved
211 // registers, so we have to use two more registers to avoid
212 // clobbering cp.
213 __ mov(r2, Operand(cp));
214 __ RecordWrite(r2, Operand(r1), r3, r0);
215 }
216 }
217 Comment(";;; End allocate local context");
218 }
219
Ben Murdochb0fe1622011-05-05 13:52:32 +0100220 // Trace the call.
221 if (FLAG_trace) {
222 __ CallRuntime(Runtime::kTraceEnter, 0);
223 }
224 return !is_aborted();
225}
226
227
228bool LCodeGen::GenerateBody() {
229 ASSERT(is_generating());
230 bool emit_instructions = true;
231 for (current_instruction_ = 0;
232 !is_aborted() && current_instruction_ < instructions_->length();
233 current_instruction_++) {
234 LInstruction* instr = instructions_->at(current_instruction_);
235 if (instr->IsLabel()) {
236 LLabel* label = LLabel::cast(instr);
237 emit_instructions = !label->HasReplacement();
238 }
239
240 if (emit_instructions) {
241 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
242 instr->CompileToNative(this);
243 }
244 }
245 return !is_aborted();
246}
247
248
249LInstruction* LCodeGen::GetNextInstruction() {
250 if (current_instruction_ < instructions_->length() - 1) {
251 return instructions_->at(current_instruction_ + 1);
252 } else {
253 return NULL;
254 }
255}
256
257
258bool LCodeGen::GenerateDeferredCode() {
259 ASSERT(is_generating());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000260 if (deferred_.length() > 0) {
261 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
262 LDeferredCode* code = deferred_[i];
263 __ bind(code->entry());
264 code->Generate();
265 __ jmp(code->exit());
266 }
267
268 // Pad code to ensure that the last piece of deferred code have
269 // room for lazy bailout.
270 while ((masm()->pc_offset() - LastSafepointEnd())
271 < Deoptimizer::patch_size()) {
272 __ nop();
273 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100274 }
275
Ben Murdoch257744e2011-11-30 15:57:28 +0000276 // Force constant pool emission at the end of the deferred code to make
277 // sure that no constant pools are emitted after.
Ben Murdochb8e0da22011-05-16 14:20:40 +0100278 masm()->CheckConstPool(true, false);
279
Ben Murdoch257744e2011-11-30 15:57:28 +0000280 return !is_aborted();
281}
282
283
284bool LCodeGen::GenerateDeoptJumpTable() {
285 // Check that the jump table is accessible from everywhere in the function
286 // code, ie that offsets to the table can be encoded in the 24bit signed
287 // immediate of a branch instruction.
288 // To simplify we consider the code size from the first instruction to the
289 // end of the jump table. We also don't consider the pc load delta.
290 // Each entry in the jump table generates one instruction and inlines one
291 // 32bit data after it.
292 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
293 deopt_jump_table_.length() * 2)) {
294 Abort("Generated code is too large");
295 }
296
297 // Block the constant pool emission during the jump table emission.
298 __ BlockConstPoolFor(deopt_jump_table_.length());
299 __ RecordComment("[ Deoptimisation jump table");
300 Label table_start;
301 __ bind(&table_start);
302 for (int i = 0; i < deopt_jump_table_.length(); i++) {
303 __ bind(&deopt_jump_table_[i].label);
304 __ ldr(pc, MemOperand(pc, Assembler::kInstrSize - Assembler::kPcLoadDelta));
305 __ dd(reinterpret_cast<uint32_t>(deopt_jump_table_[i].address));
306 }
307 ASSERT(masm()->InstructionsGeneratedSince(&table_start) ==
308 deopt_jump_table_.length() * 2);
309 __ RecordComment("]");
310
311 // The deoptimization jump table is the last part of the instruction
312 // sequence. Mark the generated code as done unless we bailed out.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100313 if (!is_aborted()) status_ = DONE;
314 return !is_aborted();
315}
316
317
318bool LCodeGen::GenerateSafepointTable() {
319 ASSERT(is_done());
Ben Murdoch257744e2011-11-30 15:57:28 +0000320 safepoints_.Emit(masm(), GetStackSlotCount());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100321 return !is_aborted();
322}
323
324
325Register LCodeGen::ToRegister(int index) const {
326 return Register::FromAllocationIndex(index);
327}
328
329
330DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
331 return DoubleRegister::FromAllocationIndex(index);
332}
333
334
335Register LCodeGen::ToRegister(LOperand* op) const {
336 ASSERT(op->IsRegister());
337 return ToRegister(op->index());
338}
339
340
341Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
342 if (op->IsRegister()) {
343 return ToRegister(op->index());
344 } else if (op->IsConstantOperand()) {
345 __ mov(scratch, ToOperand(op));
346 return scratch;
347 } else if (op->IsStackSlot() || op->IsArgument()) {
348 __ ldr(scratch, ToMemOperand(op));
349 return scratch;
350 }
351 UNREACHABLE();
352 return scratch;
353}
354
355
356DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
357 ASSERT(op->IsDoubleRegister());
358 return ToDoubleRegister(op->index());
359}
360
361
362DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
363 SwVfpRegister flt_scratch,
364 DoubleRegister dbl_scratch) {
365 if (op->IsDoubleRegister()) {
366 return ToDoubleRegister(op->index());
367 } else if (op->IsConstantOperand()) {
368 LConstantOperand* const_op = LConstantOperand::cast(op);
369 Handle<Object> literal = chunk_->LookupLiteral(const_op);
370 Representation r = chunk_->LookupLiteralRepresentation(const_op);
371 if (r.IsInteger32()) {
372 ASSERT(literal->IsNumber());
373 __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
374 __ vmov(flt_scratch, ip);
375 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
376 return dbl_scratch;
377 } else if (r.IsDouble()) {
378 Abort("unsupported double immediate");
379 } else if (r.IsTagged()) {
380 Abort("unsupported tagged immediate");
381 }
382 } else if (op->IsStackSlot() || op->IsArgument()) {
383 // TODO(regis): Why is vldr not taking a MemOperand?
384 // __ vldr(dbl_scratch, ToMemOperand(op));
385 MemOperand mem_op = ToMemOperand(op);
386 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
387 return dbl_scratch;
388 }
389 UNREACHABLE();
390 return dbl_scratch;
391}
392
393
394int LCodeGen::ToInteger32(LConstantOperand* op) const {
395 Handle<Object> value = chunk_->LookupLiteral(op);
396 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
397 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
398 value->Number());
399 return static_cast<int32_t>(value->Number());
400}
401
402
403Operand LCodeGen::ToOperand(LOperand* op) {
404 if (op->IsConstantOperand()) {
405 LConstantOperand* const_op = LConstantOperand::cast(op);
406 Handle<Object> literal = chunk_->LookupLiteral(const_op);
407 Representation r = chunk_->LookupLiteralRepresentation(const_op);
408 if (r.IsInteger32()) {
409 ASSERT(literal->IsNumber());
410 return Operand(static_cast<int32_t>(literal->Number()));
411 } else if (r.IsDouble()) {
412 Abort("ToOperand Unsupported double immediate.");
413 }
414 ASSERT(r.IsTagged());
415 return Operand(literal);
416 } else if (op->IsRegister()) {
417 return Operand(ToRegister(op));
418 } else if (op->IsDoubleRegister()) {
419 Abort("ToOperand IsDoubleRegister unimplemented");
420 return Operand(0);
421 }
422 // Stack slots not implemented, use ToMemOperand instead.
423 UNREACHABLE();
424 return Operand(0);
425}
426
427
428MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100429 ASSERT(!op->IsRegister());
430 ASSERT(!op->IsDoubleRegister());
431 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
432 int index = op->index();
433 if (index >= 0) {
434 // Local or spill slot. Skip the frame pointer, function, and
435 // context in the fixed part of the frame.
436 return MemOperand(fp, -(index + 3) * kPointerSize);
437 } else {
438 // Incoming parameter. Skip the return address.
439 return MemOperand(fp, -(index - 1) * kPointerSize);
440 }
441}
442
443
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100444MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
445 ASSERT(op->IsDoubleStackSlot());
446 int index = op->index();
447 if (index >= 0) {
448 // Local or spill slot. Skip the frame pointer, function, context,
449 // and the first word of the double in the fixed part of the frame.
450 return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
451 } else {
452 // Incoming parameter. Skip the return address and the first word of
453 // the double.
454 return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
455 }
456}
457
458
Ben Murdochb8e0da22011-05-16 14:20:40 +0100459void LCodeGen::WriteTranslation(LEnvironment* environment,
460 Translation* translation) {
461 if (environment == NULL) return;
462
463 // The translation includes one command per value in the environment.
464 int translation_size = environment->values()->length();
465 // The output frame height does not include the parameters.
466 int height = translation_size - environment->parameter_count();
467
468 WriteTranslation(environment->outer(), translation);
469 int closure_id = DefineDeoptimizationLiteral(environment->closure());
470 translation->BeginFrame(environment->ast_id(), closure_id, height);
471 for (int i = 0; i < translation_size; ++i) {
472 LOperand* value = environment->values()->at(i);
473 // spilled_registers_ and spilled_double_registers_ are either
474 // both NULL or both set.
475 if (environment->spilled_registers() != NULL && value != NULL) {
476 if (value->IsRegister() &&
477 environment->spilled_registers()[value->index()] != NULL) {
478 translation->MarkDuplicate();
479 AddToTranslation(translation,
480 environment->spilled_registers()[value->index()],
481 environment->HasTaggedValueAt(i));
482 } else if (
483 value->IsDoubleRegister() &&
484 environment->spilled_double_registers()[value->index()] != NULL) {
485 translation->MarkDuplicate();
486 AddToTranslation(
487 translation,
488 environment->spilled_double_registers()[value->index()],
489 false);
490 }
491 }
492
493 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
494 }
495}
496
497
Ben Murdochb0fe1622011-05-05 13:52:32 +0100498void LCodeGen::AddToTranslation(Translation* translation,
499 LOperand* op,
500 bool is_tagged) {
501 if (op == NULL) {
502 // TODO(twuerthinger): Introduce marker operands to indicate that this value
503 // is not present and must be reconstructed from the deoptimizer. Currently
504 // this is only used for the arguments object.
505 translation->StoreArgumentsObject();
506 } else if (op->IsStackSlot()) {
507 if (is_tagged) {
508 translation->StoreStackSlot(op->index());
509 } else {
510 translation->StoreInt32StackSlot(op->index());
511 }
512 } else if (op->IsDoubleStackSlot()) {
513 translation->StoreDoubleStackSlot(op->index());
514 } else if (op->IsArgument()) {
515 ASSERT(is_tagged);
Ben Murdoch257744e2011-11-30 15:57:28 +0000516 int src_index = GetStackSlotCount() + op->index();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100517 translation->StoreStackSlot(src_index);
518 } else if (op->IsRegister()) {
519 Register reg = ToRegister(op);
520 if (is_tagged) {
521 translation->StoreRegister(reg);
522 } else {
523 translation->StoreInt32Register(reg);
524 }
525 } else if (op->IsDoubleRegister()) {
526 DoubleRegister reg = ToDoubleRegister(op);
527 translation->StoreDoubleRegister(reg);
528 } else if (op->IsConstantOperand()) {
529 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
530 int src_index = DefineDeoptimizationLiteral(literal);
531 translation->StoreLiteral(src_index);
532 } else {
533 UNREACHABLE();
534 }
535}
536
537
538void LCodeGen::CallCode(Handle<Code> code,
539 RelocInfo::Mode mode,
540 LInstruction* instr) {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100541 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
542}
543
544
545void LCodeGen::CallCodeGeneric(Handle<Code> code,
546 RelocInfo::Mode mode,
547 LInstruction* instr,
548 SafepointMode safepoint_mode) {
Steve Block1e0659c2011-05-24 12:43:12 +0100549 ASSERT(instr != NULL);
550 LPointerMap* pointers = instr->pointer_map();
551 RecordPosition(pointers->position());
552 __ Call(code, mode);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100553 RegisterLazyDeoptimization(instr, safepoint_mode);
Ben Murdoch18a6f572011-07-25 17:16:09 +0100554
555 // Signal that we don't inline smi code before these stubs in the
556 // optimizing code generator.
Ben Murdoch257744e2011-11-30 15:57:28 +0000557 if (code->kind() == Code::BINARY_OP_IC ||
Ben Murdoch18a6f572011-07-25 17:16:09 +0100558 code->kind() == Code::COMPARE_IC) {
559 __ nop();
560 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100561}
562
563
Steve Block44f0eee2011-05-26 01:26:41 +0100564void LCodeGen::CallRuntime(const Runtime::Function* function,
Ben Murdochb0fe1622011-05-05 13:52:32 +0100565 int num_arguments,
566 LInstruction* instr) {
567 ASSERT(instr != NULL);
568 LPointerMap* pointers = instr->pointer_map();
569 ASSERT(pointers != NULL);
570 RecordPosition(pointers->position());
571
572 __ CallRuntime(function, num_arguments);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100573 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100574}
575
576
Ben Murdoch8b112d22011-06-08 16:22:53 +0100577void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
578 int argc,
579 LInstruction* instr) {
580 __ CallRuntimeSaveDoubles(id);
581 RecordSafepointWithRegisters(
582 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
583}
584
585
586void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
587 SafepointMode safepoint_mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100588 // Create the environment to bailout to. If the call has side effects
589 // execution has to continue after the call otherwise execution can continue
590 // from a previous bailout point repeating the call.
591 LEnvironment* deoptimization_environment;
592 if (instr->HasDeoptimizationEnvironment()) {
593 deoptimization_environment = instr->deoptimization_environment();
594 } else {
595 deoptimization_environment = instr->environment();
596 }
597
598 RegisterEnvironmentForDeoptimization(deoptimization_environment);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100599 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
600 RecordSafepoint(instr->pointer_map(),
601 deoptimization_environment->deoptimization_index());
602 } else {
603 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
604 RecordSafepointWithRegisters(
605 instr->pointer_map(),
606 0,
607 deoptimization_environment->deoptimization_index());
608 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100609}
610
611
612void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
613 if (!environment->HasBeenRegistered()) {
614 // Physical stack frame layout:
615 // -x ............. -4 0 ..................................... y
616 // [incoming arguments] [spill slots] [pushed outgoing arguments]
617
618 // Layout of the environment:
619 // 0 ..................................................... size-1
620 // [parameters] [locals] [expression stack including arguments]
621
622 // Layout of the translation:
623 // 0 ........................................................ size - 1 + 4
624 // [expression stack including arguments] [locals] [4 words] [parameters]
625 // |>------------ translation_size ------------<|
626
627 int frame_count = 0;
628 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
629 ++frame_count;
630 }
631 Translation translation(&translations_, frame_count);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100632 WriteTranslation(environment, &translation);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100633 int deoptimization_index = deoptimizations_.length();
634 environment->Register(deoptimization_index, translation.index());
635 deoptimizations_.Add(environment);
636 }
637}
638
639
640void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
641 RegisterEnvironmentForDeoptimization(environment);
642 ASSERT(environment->HasBeenRegistered());
643 int id = environment->deoptimization_index();
644 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
645 ASSERT(entry != NULL);
646 if (entry == NULL) {
647 Abort("bailout was not prepared");
648 return;
649 }
650
651 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM.
652
653 if (FLAG_deopt_every_n_times == 1 &&
654 info_->shared_info()->opt_count() == id) {
655 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
656 return;
657 }
658
Ben Murdoch257744e2011-11-30 15:57:28 +0000659 if (FLAG_trap_on_deopt) __ stop("trap_on_deopt", cc);
660
Steve Block1e0659c2011-05-24 12:43:12 +0100661 if (cc == al) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100662 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
663 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +0000664 // We often have several deopts to the same entry, reuse the last
665 // jump entry if this is the case.
666 if (deopt_jump_table_.is_empty() ||
667 (deopt_jump_table_.last().address != entry)) {
668 deopt_jump_table_.Add(JumpTableEntry(entry));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100669 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000670 __ b(cc, &deopt_jump_table_.last().label);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100671 }
672}
673
674
675void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
676 int length = deoptimizations_.length();
677 if (length == 0) return;
678 ASSERT(FLAG_deopt);
679 Handle<DeoptimizationInputData> data =
Steve Block44f0eee2011-05-26 01:26:41 +0100680 factory()->NewDeoptimizationInputData(length, TENURED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100681
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100682 Handle<ByteArray> translations = translations_.CreateByteArray();
683 data->SetTranslationByteArray(*translations);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100684 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
685
686 Handle<FixedArray> literals =
Steve Block44f0eee2011-05-26 01:26:41 +0100687 factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100688 for (int i = 0; i < deoptimization_literals_.length(); i++) {
689 literals->set(i, *deoptimization_literals_[i]);
690 }
691 data->SetLiteralArray(*literals);
692
693 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
694 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
695
696 // Populate the deoptimization entries.
697 for (int i = 0; i < length; i++) {
698 LEnvironment* env = deoptimizations_[i];
699 data->SetAstId(i, Smi::FromInt(env->ast_id()));
700 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
701 data->SetArgumentsStackHeight(i,
702 Smi::FromInt(env->arguments_stack_height()));
703 }
704 code->set_deoptimization_data(*data);
705}
706
707
708int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
709 int result = deoptimization_literals_.length();
710 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
711 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
712 }
713 deoptimization_literals_.Add(literal);
714 return result;
715}
716
717
718void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
719 ASSERT(deoptimization_literals_.length() == 0);
720
721 const ZoneList<Handle<JSFunction> >* inlined_closures =
722 chunk()->inlined_closures();
723
724 for (int i = 0, length = inlined_closures->length();
725 i < length;
726 i++) {
727 DefineDeoptimizationLiteral(inlined_closures->at(i));
728 }
729
730 inlined_function_count_ = deoptimization_literals_.length();
731}
732
733
Steve Block1e0659c2011-05-24 12:43:12 +0100734void LCodeGen::RecordSafepoint(
735 LPointerMap* pointers,
736 Safepoint::Kind kind,
737 int arguments,
738 int deoptimization_index) {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100739 ASSERT(expected_safepoint_kind_ == kind);
740
Ben Murdochb0fe1622011-05-05 13:52:32 +0100741 const ZoneList<LOperand*>* operands = pointers->operands();
742 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
Steve Block1e0659c2011-05-24 12:43:12 +0100743 kind, arguments, deoptimization_index);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100744 for (int i = 0; i < operands->length(); i++) {
745 LOperand* pointer = operands->at(i);
746 if (pointer->IsStackSlot()) {
747 safepoint.DefinePointerSlot(pointer->index());
Steve Block1e0659c2011-05-24 12:43:12 +0100748 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
749 safepoint.DefinePointerRegister(ToRegister(pointer));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100750 }
751 }
Steve Block1e0659c2011-05-24 12:43:12 +0100752 if (kind & Safepoint::kWithRegisters) {
753 // Register cp always contains a pointer to the context.
754 safepoint.DefinePointerRegister(cp);
755 }
756}
757
758
759void LCodeGen::RecordSafepoint(LPointerMap* pointers,
760 int deoptimization_index) {
761 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100762}
763
764
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100765void LCodeGen::RecordSafepoint(int deoptimization_index) {
766 LPointerMap empty_pointers(RelocInfo::kNoPosition);
767 RecordSafepoint(&empty_pointers, deoptimization_index);
768}
769
770
Ben Murdochb0fe1622011-05-05 13:52:32 +0100771void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
772 int arguments,
773 int deoptimization_index) {
Steve Block1e0659c2011-05-24 12:43:12 +0100774 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
775 deoptimization_index);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100776}
777
778
Ben Murdochb8e0da22011-05-16 14:20:40 +0100779void LCodeGen::RecordSafepointWithRegistersAndDoubles(
780 LPointerMap* pointers,
781 int arguments,
782 int deoptimization_index) {
Steve Block1e0659c2011-05-24 12:43:12 +0100783 RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
784 deoptimization_index);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100785}
786
787
Ben Murdochb0fe1622011-05-05 13:52:32 +0100788void LCodeGen::RecordPosition(int position) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000789 if (position == RelocInfo::kNoPosition) return;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100790 masm()->positions_recorder()->RecordPosition(position);
791}
792
793
794void LCodeGen::DoLabel(LLabel* label) {
795 if (label->is_loop_header()) {
796 Comment(";;; B%d - LOOP entry", label->block_id());
797 } else {
798 Comment(";;; B%d", label->block_id());
799 }
800 __ bind(label->label());
801 current_block_ = label->block_id();
Ben Murdoch257744e2011-11-30 15:57:28 +0000802 DoGap(label);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100803}
804
805
806void LCodeGen::DoParallelMove(LParallelMove* move) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100807 resolver_.Resolve(move);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100808}
809
810
// Emits all parallel moves attached to a gap, in inner-position order.
// If the next instruction is a lazy bailout, records the pc after the gap
// so the safepoint table points past the emitted moves.
void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}
826
827
// An LInstructionGap behaves exactly like a plain gap; forward to DoGap.
void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}
831
832
// Parameters are already in their assigned locations on entry; no code is
// emitted for them.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
836
837
// Calls the code stub selected by the hydrogen instruction's major key.
// All stubs called here return their result in r0.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // The stub expects its (tagged) argument in r0; load it from the stack.
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
882
883
// OSR values are materialized by the on-stack-replacement machinery itself;
// no code is emitted here.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
887
888
// Emits code for integer modulo. Three strategies, in order:
//  1. power-of-two constant divisor: bit masking (with -0 deopt check),
//  2. small nonnegative operands: repeated subtraction (kUnfolds steps),
//  3. general case: VFP double division and back-conversion.
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));
    Register result = ToRegister(instr->result());

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    // The sign of the divisor does not affect the remainder's magnitude.
    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    // Negative dividend: compute |dividend| & (divisor - 1), then negate.
    __ rsb(result, dividend, Operand(0));
    __ and_(result, result, Operand(divisor - 1), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Remainder is zero with a negative dividend => result would be -0.
      DeoptimizeIf(eq, instr->environment());
    }
    __ rsb(result, result, Operand(0));
    __ b(&done);
    __ bind(&positive_dividend);
    __ and_(result, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  __ Move(result, left);

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  __ b(lt, &vfp_modulo);

  // left < right => left is already the remainder.
  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the right hand side is smaller than the (nonnegative)
  // left hand side, the left hand side is the result.
  // Else try a few subtractions of the left hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less or equal than the
    // right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value is preloaded before. Be careful that 'right' is only live
  // on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result: remainder = left - divisor * quotient.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0: a zero remainder with a negative left operand.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}
1024
1025
// Emits code for integer division. Deopt checks for x / 0, (0 / -x) => -0
// and kMinInt / -1 come first; then fast paths for divisors 1, 2 and 4
// (exact shifts only); otherwise both operands are tagged as Smis and the
// generic binary-op stub is called via deferred code, deoptimizing if
// tagging fails or the stub result is not a Smi.
void LCodeGen::DoDivI(LDivI* instr) {
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1), which overflows int32.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  // Divide by 2: shift only when the low bit is clear (exact division).
  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  // Divide by 4: shift only when the two low bits are clear.
  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged to Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}
1104
1105
// Deferred-code helper: calls the generic BinaryOpStub for 'op' with the
// instruction's two register inputs. Shuffles the operands into the stub's
// calling convention (left in r1, right in r0) without clobbering either,
// then stores the stub's r0 result back into the safepoint register slot.
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    // Operands are exactly swapped; exchange them via scratch r2.
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  BinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
1135
1136
// Emits code for integer multiplication. Constant right operands (when no
// overflow check is needed) use strength-reduced code: shifts for powers of
// two and shift+add/rsb for powers of two +/- one. Otherwise a plain mul is
// emitted, with smull-based overflow detection and a negative-zero bailout
// when the respective hydrogen flags are set.
void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());
  // Note that result may alias left.
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right_op = instr->InputAt(1);

  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  bool bailout_on_minus_zero =
      instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);

  if (right_op->IsConstantOperand() && !can_overflow) {
    // Use optimized code for specific constants.
    int32_t constant = ToInteger32(LConstantOperand::cast(right_op));

    if (bailout_on_minus_zero && (constant < 0)) {
      // The case of a zero constant will be handled separately.
      // If constant is negative and left is zero, the result should be -0.
      __ cmp(left, Operand(0));
      DeoptimizeIf(eq, instr->environment());
    }

    switch (constant) {
      case -1:
        __ rsb(result, left, Operand(0));
        break;
      case 0:
        if (bailout_on_minus_zero) {
          // If left is strictly negative and the constant is zero, the
          // result is -0. Deoptimize if required, otherwise return 0.
          __ cmp(left, Operand(0));
          DeoptimizeIf(mi, instr->environment());
        }
        __ mov(result, Operand(0));
        break;
      case 1:
        __ Move(result, left);
        break;
      default:
        // Multiplying by powers of two and powers of two plus or minus
        // one can be done faster with shifted operands.
        // For other constants we emit standard code.
        int32_t mask = constant >> 31;
        uint32_t constant_abs = (constant + mask) ^ mask;

        if (IsPowerOf2(constant_abs) ||
            IsPowerOf2(constant_abs - 1) ||
            IsPowerOf2(constant_abs + 1)) {
          if (IsPowerOf2(constant_abs)) {
            int32_t shift = WhichPowerOf2(constant_abs);
            __ mov(result, Operand(left, LSL, shift));
          } else if (IsPowerOf2(constant_abs - 1)) {
            int32_t shift = WhichPowerOf2(constant_abs - 1);
            __ add(result, left, Operand(left, LSL, shift));
          } else if (IsPowerOf2(constant_abs + 1)) {
            int32_t shift = WhichPowerOf2(constant_abs + 1);
            __ rsb(result, left, Operand(left, LSL, shift));
          }

          // Correct the sign of the result if the constant is negative.
          if (constant < 0) __ rsb(result, result, Operand(0));

        } else {
          // Generate standard code.
          __ mov(ip, Operand(constant));
          __ mul(result, left, ip);
        }
    }

  } else {
    Register right = EmitLoadRegister(right_op, scratch);
    if (bailout_on_minus_zero) {
      // Remember the sign information of both operands for the -0 check.
      __ orr(ToRegister(instr->TempAt(0)), left, right);
    }

    if (can_overflow) {
      // scratch:result = left * right.
      __ smull(result, scratch, left, right);
      // The multiplication overflowed iff the high word is not the sign
      // extension of the low word.
      __ cmp(scratch, Operand(result, ASR, 31));
      DeoptimizeIf(ne, instr->environment());
    } else {
      __ mul(result, left, right);
    }

    if (bailout_on_minus_zero) {
      // Bail out if the result is supposed to be negative zero.
      Label done;
      __ cmp(result, Operand(0));
      __ b(ne, &done);
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
      __ bind(&done);
    }
  }
}
1232
1233
// Emits code for a bitwise AND/OR/XOR. The left operand must be a register;
// the right operand may be a register, constant, stack slot or argument
// (the latter two are loaded into ip first).
void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left_op = instr->InputAt(0);
  LOperand* right_op = instr->InputAt(1);
  ASSERT(left_op->IsRegister());
  Register left = ToRegister(left_op);
  Register result = ToRegister(instr->result());
  Operand right(no_reg);

  if (right_op->IsStackSlot() || right_op->IsArgument()) {
    right = Operand(EmitLoadRegister(right_op, ip));
  } else {
    ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
    right = ToOperand(right_op);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, left, right);
      break;
    case Token::BIT_OR:
      __ orr(result, left, right);
      break;
    case Token::BIT_XOR:
      __ eor(result, left, right);
      break;
    default:
      UNREACHABLE();
      break;
  }
}
1264
1265
// Emits code for SAR/SHR/SHL. The shift amount is either a register
// (masked to 0..31 at runtime) or a constant (masked at compile time).
// A logical right shift whose result would set the sign bit cannot be
// represented as a tagged int32, so it deoptimizes when can_deopt() is set.
void LCodeGen::DoShiftI(LShiftI* instr) {
  // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
  // result may alias either of them.
  LOperand* right_op = instr->InputAt(1);
  Register left = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  if (right_op->IsRegister()) {
    // Mask the right_op operand.
    __ and_(scratch, ToRegister(right_op), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(left, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          // A negative (sign-bit-set) result cannot be tagged; deoptimize.
          __ mov(result, Operand(left, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(left, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(left, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Mask the right_op operand.
    int value = ToInteger32(LConstantOperand::cast(right_op));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, ASR, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSR, shift_count));
        } else {
          // Shift by zero: value passes through, but a set sign bit still
          // makes the result unrepresentable as int32.
          if (instr->can_deopt()) {
            __ tst(left, Operand(0x80000000));
            DeoptimizeIf(ne, instr->environment());
          }
          __ Move(result, left);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSL, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1331
1332
// Emits code for integer subtraction. When the hydrogen instruction can
// overflow, the sub sets the condition codes and a deopt on overflow (vs)
// follows.
void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    DeoptimizeIf(vs, instr->environment());
  }
}
1352
1353
1354void LCodeGen::DoConstantI(LConstantI* instr) {
1355 ASSERT(instr->result()->IsRegister());
1356 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1357}
1358
1359
// Materializes a double constant into the instruction's VFP result register.
void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DwVfpRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ Vmov(result, v);
}
1366
1367
// Materializes a tagged constant into the instruction's result register.
void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}
1372
1373
Steve Block9fac8402011-05-12 15:51:54 +01001374void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001375 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001376 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001377 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
1378}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001379
Ben Murdochb0fe1622011-05-05 13:52:32 +01001380
// Loads the length field of an external array into the result register.
void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
}
1386
1387
// Loads the length field of a FixedArray into the result register.
void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
}
1393
1394
// Extracts the elements kind of an object from its map's bit field 2.
void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ ldr(result, FieldMemOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
}
1407
1408
// Implements the valueOf operation on wrapper objects: smis and
// non-JSValue objects are returned unchanged; for a JSValue the wrapped
// value field is loaded.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ Move(result, input, eq);
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ Move(result, input, ne);
  __ b(ne, &done);
  // Otherwise load the wrapped value.
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1428
1429
1430void LCodeGen::DoBitNotI(LBitNotI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001431 Register input = ToRegister(instr->InputAt(0));
1432 Register result = ToRegister(instr->result());
1433 __ mvn(result, Operand(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001434}
1435
1436
// Pushes the value to throw and calls the Throw runtime function. Control
// never returns here; the trailing stop guards against falling through in
// debug builds.
void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}
1446
1447
// Emits code for integer addition. When the hydrogen instruction can
// overflow, the add sets the condition codes and a deopt on overflow (vs)
// follows. Mirrors DoSubI.
void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    DeoptimizeIf(vs, instr->environment());
  }
}
1467
1468
// Emits code for a double-precision arithmetic operation. ADD/SUB/MUL/DIV
// map to single VFP instructions; MOD calls out to the C fmod helper with
// r0-r3 preserved around the call.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  DoubleRegister result = ToDoubleRegister(instr->result());
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(result, left, right);
      break;
    case Token::SUB:
      __ vsub(result, left, right);
      break;
    case Token::MUL:
      __ vmul(result, left, right);
      break;
    case Token::DIV:
      __ vdiv(result, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      __ PrepareCallCFunction(0, 2, scratch0());
      __ SetCallCDoubleArguments(left, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          0, 2);
      // Move the result in the double result register.
      __ GetCFunctionDoubleResult(result);

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
1507
1508
// Emits code for a generic (tagged) arithmetic operation by calling the
// BinaryOpStub. Operands are fixed to r1 (left) and r0 (right); the result
// comes back in r0.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}
1518
1519
1520int LCodeGen::GetNextEmittedBlock(int block) {
1521 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1522 LLabel* label = chunk_->GetLabel(i);
1523 if (!label->HasReplacement()) return i;
1524 }
1525 return -1;
1526}
1527
1528
// Emits a conditional two-way branch, eliding a jump when one of the
// targets is the next block to be emitted (fall-through optimization).
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    // Both targets coincide; an unconditional goto suffices.
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // Fall through to the left target; branch only on the negated condition.
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // Fall through to the right target.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}
1545
1546
// Branches on the truth value of the input, dispatching on its
// representation: integer (non-zero is true), double (zero and NaN are
// false), or tagged (full ToBoolean semantics, with a stub fallback).
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->value()->type().IsBoolean()) {
      // Known boolean: only need to compare against the true value.
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      // Fast checks against well-known values: undefined/false are false,
      // true is true, smi zero is false, any other smi is true.
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ JumpIfSmi(reg, true_label);

      // Test double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = double_scratch0();
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      // Untag the heap-number pointer to load its double payload.
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, ne);
    }
  }
}
1616
1617
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001618void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001619 block = chunk_->LookupDestination(block);
1620 int next_block = GetNextEmittedBlock(current_block_);
1621 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001622 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001623 }
1624}
1625
1626
// Unconditional control-flow edge; delegates to EmitGoto, which elides
// jumps to the fall-through block.
void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());
}
1630
1631
1632Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001633 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001634 switch (op) {
1635 case Token::EQ:
1636 case Token::EQ_STRICT:
1637 cond = eq;
1638 break;
1639 case Token::LT:
1640 cond = is_unsigned ? lo : lt;
1641 break;
1642 case Token::GT:
1643 cond = is_unsigned ? hi : gt;
1644 break;
1645 case Token::LTE:
1646 cond = is_unsigned ? ls : le;
1647 break;
1648 case Token::GTE:
1649 cond = is_unsigned ? hs : ge;
1650 break;
1651 case Token::IN:
1652 case Token::INSTANCEOF:
1653 default:
1654 UNREACHABLE();
1655 }
1656 return cond;
1657}
1658
1659
// Emits an integer register-register compare, setting the condition flags.
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}
1663
1664
// Compare-and-branch for integer or double operands. For doubles an
// unordered result (NaN operand) always takes the false branch.
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to false block label.
    __ b(vs, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  // Doubles use the unsigned condition variants (see TokenToCondition).
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}
1685
1686
// Branches on pointer identity of two tagged values.
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}
1696
1697
// Branches on equality with a compile-time constant right operand.
void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmp(left, Operand(instr->hydrogen()->right()));
  EmitBranch(true_block, false_block, eq);
}
1706
1707
// Branches on (value == null). In strict mode only null matches; otherwise
// undefined and undetectable objects also compare true (JS == semantics).
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    // Smis are neither null nor undetectable.
    __ JumpIfSmi(reg, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}
1738
1739
// Tests whether |input| is a JS object in the sense of the IsObject
// predicate. Jumps to |is_not_object| / |is_object| for the early-decided
// cases; otherwise falls through with flags set so that the returned
// condition (le) holds exactly when the value is an object.
// Clobbers temp1 and scratch0(); |input| is preserved.
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Label* is_not_object,
                                 Label* is_object) {
  Register temp2 = scratch0();
  __ JumpIfSmi(input, is_not_object);

  // null counts as an object for this predicate.
  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
  __ cmp(input, temp2);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  return le;
}
1765
1766
// Branches on whether the input is a JS object; the heavy lifting (and the
// final flag-setting compare) is done by EmitIsObject.
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}
1782
1783
// Branches on whether the input is a smi (low tag bit clear).
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  // The operand may be a stack slot; ip serves as the load target then.
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}
1792
1793
// Branches on whether the input is an undetectable object (per the
// kIsUndetectable bit in its map); smis are never undetectable.
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
  __ tst(temp, Operand(1 << Map::kIsUndetectable));
  EmitBranch(true_block, false_block, ne);
}
1807
1808
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001809static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001810 InstanceType from = instr->from();
1811 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001812 if (from == FIRST_TYPE) return to;
1813 ASSERT(from == to || to == LAST_TYPE);
1814 return from;
1815}
1816
1817
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001818static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001819 InstanceType from = instr->from();
1820 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001821 if (from == to) return eq;
1822 if (to == LAST_TYPE) return hs;
1823 if (from == FIRST_TYPE) return ls;
1824 UNREACHABLE();
1825 return eq;
1826}
1827
1828
// Branches on whether the input's instance type lies in the instruction's
// [from, to] range; smis always take the false branch.
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ JumpIfSmi(input, false_label);

  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}
1843
1844
// Extracts the cached array index from a string's hash field into result.
// The input must already be known to be a string.
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}
1856
1857
// Branches on whether a string's hash field contains a cached array index
// (true when none of the kContainsCachedArrayIndexMask bits are set).
void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq);
}
1871
1872
// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
// On fall-through, the eq condition holds iff |input|'s class equals
// |class_name|.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String>class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  // Smis have no class.
  __ JumpIfSmi(input, is_false);
  __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
  __ b(lt, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CompareInstanceType(temp, temp2, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ b(ge, is_true);
  } else {
    __ b(ge, is_false);
  }

  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type and
  // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
  // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
  STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
                LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}
1928
1929
// Branches on whether the input's class name equals the instruction's
// literal class name; EmitClassOfTest leaves the answer in the eq flag.
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq);
}
1946
1947
// Branches on whether the object's map equals the instruction's map
// constant. Note: block ids are used directly, without LookupDestination.
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}
1958
1959
// Generic instanceof via the InstanceofStub. The stub returns zero in r0
// on success; the result is materialized as a true/false value in r0.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  // Select the boolean object with conditional moves; flags from the cmp.
  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
1971
1972
// instanceof against a known global function, with an inlined call-site
// cache: the two hole-value constants emitted below are later patched with
// the last map/result pair produced by the instanceof stub. Cache misses,
// after cheap null/string rejections, fall back to deferred code.
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  // Deferred path: runs the full InstanceofStub and patches the cache.
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A Smi is not instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurences of the
  // hole value will be patched to the last map/result pair generated by the
  // instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  __ mov(ip, Operand(factory()->the_hole_value()));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(factory()->the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values is not instance of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}
2046
2047
// Deferred slow path for DoInstanceOfKnownGlobal: calls the InstanceofStub
// configured to patch the inlined call-site cache at |map_check| and to
// return a true/false object. The instruction offset of the map check is
// passed to the stub through r4's safepoint-register slot.
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be r4 as
  // its slot of the pushing of safepoint registers is used to communicate the
  // offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  // kAdditionalDelta covers the instructions emitted between here and the
  // stub call; the constant pool is blocked so the count stays exact.
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}
2085
Ben Murdochb0fe1622011-05-05 13:52:32 +01002086
2087static Condition ComputeCompareCondition(Token::Value op) {
2088 switch (op) {
2089 case Token::EQ_STRICT:
2090 case Token::EQ:
2091 return eq;
2092 case Token::LT:
2093 return lt;
2094 case Token::GT:
2095 return gt;
2096 case Token::LTE:
2097 return le;
2098 case Token::GTE:
2099 return ge;
2100 default:
2101 UNREACHABLE();
Steve Block1e0659c2011-05-24 12:43:12 +01002102 return kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002103 }
2104}
2105
2106
// Generic (tagged) comparison through the CompareIC; materializes the
// boolean result object with conditional root loads.
void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  // GT and LTE are handled with reversed conditions here.
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}
2125
2126
// Function epilogue: optionally traces the exit, tears down the frame
// (restoring fp/lr), pops the arguments, and returns via lr.
void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  // Parameter count + 1 extra slot (NOTE(review): presumably the receiver —
  // confirm against the frame layout).
  int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}
2140
2141
// Loads a global variable directly from its property cell; deoptimizes if
// the cell holds the hole (variable possibly deleted) when required.
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}
2152
2153
// Loads a global through the LoadIC. The IC expects the global object in r0
// and the name in r2, and returns the value in r0.
void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  __ mov(r2, Operand(instr->name()));
  // typeof loads must not use the contextual reloc mode.
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}
2164
2165
// Stores to a global variable's property cell, deoptimizing first if the
// cell holds the hole (the variable may have been deleted).
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
2189
2190
// Stores a global through the StoreIC (strict or non-strict variant).
// The IC expects the global object in r1, the value in r0, the name in r2.
void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}
2201
2202
// Loads a slot from the given context object into the result register.
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(context, instr->slot_index()));
}
2208
2209
// Stores a value into a context slot, emitting a write barrier when the
// instruction says the GC needs to know about the store.
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  __ str(value, ContextOperand(context, instr->slot_index()));
  if (instr->needs_write_barrier()) {
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWrite(context, Operand(offset), value, scratch0());
  }
}
2219
2220
// Loads a named field at a known offset, either from the object itself
// (in-object property) or from its out-of-object properties array.
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}
2231
2232
// Loads property |name| of an object with map |type| into |result|: either
// a real field (in-object or in the properties array, per the descriptor
// lookup) or a constant function taken straight from the map's descriptors.
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
      __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    // Constant-function property: embed the function object directly.
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    LoadHeapObject(result, Handle<HeapObject>::cast(function));
  }
}
2258
2259
// Polymorphic named load: compare the receiver's map against each expected
// map in turn and emit a monomorphic load for the matching case.  Falls
// back to the generic LoadIC (if needed) or deoptimizes when no map matches.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps collected: only the generic case is possible.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));  // LoadIC expects the name in r2.
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    // Load the receiver's map once; each case below compares against it.
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    // The last map is handled specially: on mismatch we either go generic
    // or deoptimize instead of falling through to another comparison.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));  // LoadIC expects the name in r2.
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      // No generic fallback: an unexpected map invalidates this code.
      DeoptimizeIf(ne, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
2301
2302
Ben Murdochb0fe1622011-05-05 13:52:32 +01002303void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2304 ASSERT(ToRegister(instr->object()).is(r0));
2305 ASSERT(ToRegister(instr->result()).is(r0));
2306
2307 // Name is always in r2.
2308 __ mov(r2, Operand(instr->name()));
Steve Block44f0eee2011-05-26 01:26:41 +01002309 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002310 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2311}
2312
2313
// Loads the prototype that instances of |function| would get.  Deoptimizes
// if |function| is not a JSFunction, or if it has no instance prototype yet
// (prototype slot holds the hole).  Handles both the direct-prototype case
// and the initial-map / non-instance-prototype cases.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.  The hole
  // marks a function whose prototype has not been set up.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done: the slot
  // already holds the prototype itself.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2356
2357
// Loads the elements backing store of a JSObject.  In debug builds also
// verifies that the backing store is a FixedArray, a copy-on-write
// FixedArray, or the object uses fast/external elements.
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, fail;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    // |scratch| holds the elements object's map; extract its elements kind
    // and accept fast elements or any external array kind.
    __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
    __ ubfx(scratch, scratch, Map::kElementsKindShift,
            Map::kElementsKindBitCount);
    __ cmp(scratch, Operand(JSObject::FAST_ELEMENTS));
    __ b(eq, &done);
    __ cmp(scratch, Operand(JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ b(lt, &fail);
    __ cmp(scratch, Operand(JSObject::LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ b(le, &done);
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed.");
    __ bind(&done);
  }
}
2388
2389
Steve Block44f0eee2011-05-26 01:26:41 +01002390void LCodeGen::DoLoadExternalArrayPointer(
2391 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002392 Register to_reg = ToRegister(instr->result());
2393 Register from_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002394 __ ldr(to_reg, FieldMemOperand(from_reg,
2395 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002396}
2397
2398
// Loads the argument at |index| from the arguments area at |arguments|.
// Deoptimizes when the index is out of range (including negative indices,
// caught by the unsigned comparison).  Clobbers |length|.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bailout index is not a valid argument index. Use unsigned check to get
  // negative check for free.  (ls: length - index <= 0 unsigned.)
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them; add one more.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
2415
2416
// Loads an element from a fast-elements FixedArray.  If the hydrogen
// instruction requires it, deoptimizes when the loaded value is the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  // Note: key and scratch may alias scratch0(); key is fully consumed by
  // the address computation below before scratch is reused.
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ cmp(result, scratch);
    DeoptimizeIf(eq, instr->environment());
  }
}
2434
2435
// Loads a double element from a FixedDoubleArray into a VFP register.
// Computes the element address into |elements| (clobbering it), optionally
// deoptimizes on the hole-NaN sentinel, then performs the vldr.
void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  Register elements = ToRegister(instr->elements());
  bool key_is_constant = instr->key()->IsConstantOperand();
  Register key = no_reg;
  DwVfpRegister result = ToDoubleRegister(instr->result());
  Register scratch = scratch0();

  int shift_size =
      ElementsKindToShiftSize(JSObject::FAST_DOUBLE_ELEMENTS);
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Offsets must fit in the immediate arithmetic below.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }

  // For a constant key the header adjustment is folded into the operand;
  // for a register key it is added separately below.
  Operand operand = key_is_constant
      ? Operand(constant_key * (1 << shift_size) +
                FixedDoubleArray::kHeaderSize - kHeapObjectTag)
      : Operand(key, LSL, shift_size);
  __ add(elements, elements, operand);
  if (!key_is_constant) {
    __ add(elements, elements,
           Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  }

  if (instr->hydrogen()->RequiresHoleCheck()) {
    // TODO(danno): If no hole check is required, there is no need to allocate
    // elements into a temporary register, instead scratch can be used.
    // The hole is encoded in the upper 32 bits of the double, which sit at
    // offset sizeof(kHoleNanLower32) from the element start.
    __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
    __ cmp(scratch, Operand(kHoleNanUpper32));
    DeoptimizeIf(eq, instr->environment());
  }

  __ vldr(result, elements, 0);
}
2476
2477
// Loads an element from an external (typed) array.  Float/double kinds go
// through VFP registers; integer kinds use width-appropriate ARM loads.
// Unsigned int loads deoptimize when the value does not fit in a tagged
// int32 result.
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  JSObject::ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Offsets must fit in the addressing arithmetic below.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result = ToDoubleRegister(instr->result());
    // vldr has no register-offset form, so compute the address first.
    Operand operand = key_is_constant
        ? Operand(constant_key * (1 << shift_size))
        : Operand(key, LSL, shift_size);
    __ add(scratch0(), external_pointer, operand);
    if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
      // Load as single precision, then widen to the double result.
      __ vldr(result.low(), scratch0(), 0);
      __ vcvt_f64_f32(result, result.low());
    } else {  // i.e. elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS
      __ vldr(result, scratch0(), 0);
    }
  } else {
    Register result = ToRegister(instr->result());
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (elements_kind) {
      case JSObject::EXTERNAL_BYTE_ELEMENTS:
        __ ldrsb(result, mem_operand);
        break;
      case JSObject::EXTERNAL_PIXEL_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ ldrb(result, mem_operand);
        break;
      case JSObject::EXTERNAL_SHORT_ELEMENTS:
        __ ldrsh(result, mem_operand);
        break;
      case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ ldrh(result, mem_operand);
        break;
      case JSObject::EXTERNAL_INT_ELEMENTS:
        __ ldr(result, mem_operand);
        break;
      case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ ldr(result, mem_operand);
        // Values >= 2^31 cannot be represented as an int32 result (cs is
        // set by the comparison against 0x80000000).
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case JSObject::EXTERNAL_FLOAT_ELEMENTS:
      case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
      case JSObject::FAST_DOUBLE_ELEMENTS:
      case JSObject::FAST_ELEMENTS:
      case JSObject::DICTIONARY_ELEMENTS:
      case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
2550
2551
Ben Murdochb0fe1622011-05-05 13:52:32 +01002552void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2553 ASSERT(ToRegister(instr->object()).is(r1));
2554 ASSERT(ToRegister(instr->key()).is(r0));
2555
Steve Block44f0eee2011-05-26 01:26:41 +01002556 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002557 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2558}
2559
2560
// Computes the frame pointer of the frame that holds this function's
// arguments: fp itself, or the caller frame if it is an arguments adaptor.
// Uses conditional moves on the comparison result instead of branches.
// (The |done| and |adapted| labels are declared but never bound/used.)
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  Label done, adapted;
  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ mov(result, fp, LeaveCC, ne);
  __ mov(result, scratch, LeaveCC, eq);
}
2576
2577
// Computes the number of actual arguments: the static parameter count when
// there is no arguments adaptor frame (elem == fp), otherwise the length
// recorded in the adaptor frame.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  // The stored length is a smi; convert to an untagged integer.
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}
2598
2599
// Implements Function.prototype.apply with an arguments object: fixes up
// the receiver per the callee's mode (strict/builtin vs. normal), copies up
// to kArgumentsLimit arguments from |elements| onto the stack, and invokes
// |function|.  Deoptimizes on a bad receiver or too many arguments.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions.
  __ ldr(scratch,
         FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch,
         FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
  __ tst(scratch,
         Operand(1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Do not transform the receiver to object for builtins.
  __ tst(scratch, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Normal function. Replace undefined or null with global receiver.
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object (smi or below the first
  // spec-object instance type).
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
  DeoptimizeIf(lt, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ ldr(receiver,
         FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.  Arguments are pushed highest-index first (length counts down).
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  // The invocation can lazily deoptimize, so register the environment and
  // emit a safepoint via the generator.
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2691
2692
2693void LCodeGen::DoPushArgument(LPushArgument* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002694 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002695 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2696 Abort("DoPushArgument not implemented for double type.");
2697 } else {
2698 Register argument_reg = EmitLoadRegister(argument, ip);
2699 __ push(argument_reg);
2700 }
2701}
2702
2703
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002704void LCodeGen::DoThisFunction(LThisFunction* instr) {
2705 Register result = ToRegister(instr->result());
2706 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2707}
2708
2709
Steve Block1e0659c2011-05-24 12:43:12 +01002710void LCodeGen::DoContext(LContext* instr) {
2711 Register result = ToRegister(instr->result());
2712 __ mov(result, cp);
2713}
2714
2715
2716void LCodeGen::DoOuterContext(LOuterContext* instr) {
2717 Register context = ToRegister(instr->context());
2718 Register result = ToRegister(instr->result());
2719 __ ldr(result,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002720 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block1e0659c2011-05-24 12:43:12 +01002721}
2722
2723
Ben Murdochb0fe1622011-05-05 13:52:32 +01002724void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002725 Register context = ToRegister(instr->context());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002726 Register result = ToRegister(instr->result());
2727 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2728}
2729
2730
2731void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002732 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002733 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002734 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002735}
2736
2737
// Emits a direct call to a statically known JSFunction (already in r1).
// Switches the context if the callee's context differs, seeds r0 with the
// arity when no adaptor is needed, records the safepoint for lazy
// deoptimization, and restores cp afterwards.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to arguments count if adaption is not needed. Assumes that r0
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.  The call kind (method vs. function) travels in r5.
  __ SetCallKind(r5, call_kind);
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2771
2772
// Calls a compile-time-known function: materialize it into r1 (required by
// CallKnownFunction) and delegate.  The result arrives in r0.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->function()));
  CallKnownFunction(instr->function(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD);
}
2781
2782
// Deferred (slow) path of Math.abs for a tagged input that is not a smi.
// Deoptimizes unless the input is a heap number.  Positive numbers are
// returned unchanged; for negative numbers a fresh heap number with the
// sign bit cleared is allocated (runtime call on allocation failure).
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  // Reuse scratch0() under a new name; invalidate |scratch| to make the
  // aliasing explicit.
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  // Move the input to the result if necessary.
  __ Move(result, input);
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    // Clear the sign bit and copy exponent and mantissa into the new number.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    __ StoreToSafepointRegisterSlot(tmp1, result);
  }

  __ bind(&done);
}
2847
2848
// Emits integer Math.abs: copies the input when non-negative, negates it
// otherwise, and deoptimizes on overflow (abs(INT_MIN)).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ cmp(input, Operand(0));
  // pl: non-negative — result is the input itself.
  __ Move(result, input, pl);
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(result, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
2861
2862
// Math.abs dispatch on the value's representation: vabs for doubles,
// inline integer abs for int32, and for tagged values an inline smi fast
// path with a deferred heap-number slow path.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    DwVfpRegister result = ToDoubleRegister(instr->result());
    __ vabs(result, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2896
2897
// Math.floor of a double into an integer result: truncate toward minus
// infinity via VFP, deoptimizing when the conversion is inexact/out of
// range, and optionally deoptimizing on a -0 result.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  __ EmitVFPTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     scratch2);
  // EmitVFPTruncate leaves ne set when the truncation was not exact.
  DeoptimizeIf(ne, instr->environment());

  // Move the truncated value into the general purpose result register.
  __ vmov(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0: a zero integer result with the input's sign bit set.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}
2926
2927
// Emits code for Math.round on a double input, producing an int32 result.
// Rounds by adding 0.5 and truncating toward minus infinity, with explicit
// handling of the |x| < 0.5 case and of -0.
// NOTE(review): scratch1 aliases the result register, and the input double
// register is modified in place by the vadd below — presumably the register
// allocator guarantees this is safe; confirm before reusing this pattern.
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch1 = result;
  Register scratch2 = scratch0();
  Label done, check_sign_on_zero;

  // Extract exponent bits.
  __ vmov(scratch1, input.high());
  __ ubfx(scratch2,
          scratch1,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);

  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
  __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
  __ mov(result, Operand(0), LeaveCC, le);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ b(le, &check_sign_on_zero);
  } else {
    __ b(le, &done);
  }

  // The following conversion will not work with numbers
  // outside of ]-2^32, 2^32[.
  __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
  DeoptimizeIf(ge, instr->environment());

  // Save the original sign for later comparison.
  __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));

  // Round by adding 0.5 and flooring below.
  __ Vmov(double_scratch0(), 0.5);
  __ vadd(input, input, double_scratch0());

  // Check sign of the result: if the sign changed, the input
  // value was in ]0.5, 0[ and the result should be -0.
  __ vmov(scratch1, input.high());
  __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(mi, instr->environment());
  } else {
    __ mov(result, Operand(0), LeaveCC, mi);
    __ b(mi, &done);
  }

  // Floor the biased value toward minus infinity; "ne" signals an
  // inexact/failed conversion and triggers a deopt.
  __ EmitVFPTruncate(kRoundToMinusInf,
                     double_scratch0().low(),
                     input,
                     scratch1,
                     scratch2);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ bind(&check_sign_on_zero);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ bind(&done);
}
2992
2993
Ben Murdochb0fe1622011-05-05 13:52:32 +01002994void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002995 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002996 DoubleRegister result = ToDoubleRegister(instr->result());
2997 __ vsqrt(result, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002998}
2999
3000
Steve Block44f0eee2011-05-26 01:26:41 +01003001void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3002 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003003 DoubleRegister result = ToDoubleRegister(instr->result());
Steve Block44f0eee2011-05-26 01:26:41 +01003004 // Add +0 to convert -0 to +0.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003005 __ vadd(result, input, kDoubleRegZero);
3006 __ vsqrt(result, result);
Steve Block44f0eee2011-05-26 01:26:41 +01003007}
3008
3009
// Emits code for Math.pow.  The exponent may be a double, an untagged
// int32, or a tagged value (smi or heap number).  All three variants end
// in a call to a C helper whose double result is moved into result_reg.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  Register scratch = scratch0();
  DoubleRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(0, 2, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left),
                               ToDoubleRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 0, 2);
  } else if (exponent_type.IsInteger32()) {
    ASSERT(ToRegister(right).is(r0));
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(1, 1, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 1, 1);
  } else {
    ASSERT(exponent_type.IsTagged());
    ASSERT(instr->hydrogen()->left()->representation().IsDouble());

    Register right_reg = ToRegister(right);

    // Check for smi on the right hand side.
    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);

    // Untag smi and convert it to a double.
    __ SmiUntag(right_reg);
    SwVfpRegister single_scratch = double_scratch0().low();
    __ vmov(single_scratch, right_reg);
    __ vcvt_f64_s32(result_reg, single_scratch);
    __ jmp(&call);

    // Heap number map check; any non-heap-number exponent deoptimizes.
    __ bind(&non_smi);
    __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(scratch, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    // Load the heap number's double payload into result_reg.
    int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
    __ add(scratch, right_reg, Operand(value_offset));
    __ vldr(result_reg, scratch, 0);

    // Prepare arguments and call C function.
    __ bind(&call);
    __ PrepareCallCFunction(0, 2, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 0, 2);
  }
  // Store the result in the result register.
  __ GetCFunctionDoubleResult(result_reg);
}
3067
3068
3069void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3070 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3071 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3072 TranscendentalCacheStub::UNTAGGED);
3073 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3074}
3075
3076
3077void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3078 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3079 TranscendentalCacheStub stub(TranscendentalCache::COS,
3080 TranscendentalCacheStub::UNTAGGED);
3081 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3082}
3083
3084
3085void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3086 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3087 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3088 TranscendentalCacheStub::UNTAGGED);
3089 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3090}
3091
3092
Ben Murdochb0fe1622011-05-05 13:52:32 +01003093void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3094 switch (instr->op()) {
3095 case kMathAbs:
3096 DoMathAbs(instr);
3097 break;
3098 case kMathFloor:
3099 DoMathFloor(instr);
3100 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003101 case kMathRound:
3102 DoMathRound(instr);
3103 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003104 case kMathSqrt:
3105 DoMathSqrt(instr);
3106 break;
Steve Block44f0eee2011-05-26 01:26:41 +01003107 case kMathPowHalf:
3108 DoMathPowHalf(instr);
3109 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003110 case kMathCos:
3111 DoMathCos(instr);
3112 break;
3113 case kMathSin:
3114 DoMathSin(instr);
3115 break;
3116 case kMathLog:
3117 DoMathLog(instr);
3118 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003119 default:
3120 Abort("Unimplemented type of LUnaryMathOperation.");
3121 UNREACHABLE();
3122 }
3123}
3124
3125
Ben Murdoch257744e2011-11-30 15:57:28 +00003126void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3127 ASSERT(ToRegister(instr->function()).is(r1));
3128 ASSERT(instr->HasPointerMap());
3129 ASSERT(instr->HasDeoptimizationEnvironment());
3130 LPointerMap* pointers = instr->pointer_map();
3131 LEnvironment* env = instr->deoptimization_environment();
3132 RecordPosition(pointers->position());
3133 RegisterEnvironmentForDeoptimization(env);
3134 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3135 ParameterCount count(instr->arity());
3136 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3137 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3138}
3139
3140
Ben Murdochb0fe1622011-05-05 13:52:32 +01003141void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003142 ASSERT(ToRegister(instr->result()).is(r0));
3143
3144 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003145 Handle<Code> ic =
3146 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003147 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3148 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003149}
3150
3151
3152void LCodeGen::DoCallNamed(LCallNamed* instr) {
3153 ASSERT(ToRegister(instr->result()).is(r0));
3154
3155 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003156 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3157 Handle<Code> ic =
3158 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003159 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003160 CallCode(ic, mode, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003161 // Restore context register.
3162 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3163}
3164
3165
3166void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003167 ASSERT(ToRegister(instr->result()).is(r0));
3168
3169 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003170 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
Steve Block9fac8402011-05-12 15:51:54 +01003171 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3172 __ Drop(1);
3173 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003174}
3175
3176
3177void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003178 ASSERT(ToRegister(instr->result()).is(r0));
3179
3180 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003181 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
Steve Block44f0eee2011-05-26 01:26:41 +01003182 Handle<Code> ic =
Ben Murdoch257744e2011-11-30 15:57:28 +00003183 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003184 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003185 CallCode(ic, mode, instr);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003186 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003187}
3188
3189
// Emits a direct call to a statically known global function, bypassing the
// IC machinery.  The callee is loaded into r1; the result ends up in r0.
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->target()));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
3195
3196
3197void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003198 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003199 ASSERT(ToRegister(instr->result()).is(r0));
3200
Steve Block44f0eee2011-05-26 01:26:41 +01003201 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003202 __ mov(r0, Operand(instr->arity()));
3203 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
3204}
3205
3206
// Emits a call into the given V8 runtime function.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3210
3211
// Stores a value into a named field, either in-object or in the properties
// backing store, optionally writing a map transition first and emitting a
// write barrier when required.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  // Write the new map first if this store also transitions the object.
  if (!instr->transition().is_null()) {
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    // Out-of-object property: store into the properties array.
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}
3242
3243
3244void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3245 ASSERT(ToRegister(instr->object()).is(r1));
3246 ASSERT(ToRegister(instr->value()).is(r0));
3247
3248 // Name is always in r2.
3249 __ mov(r2, Operand(instr->name()));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003250 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003251 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3252 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003253 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3254}
3255
3256
// Deoptimizes unless 0 <= index < length.  A single unsigned comparison
// ("hs" = unsigned >=) suffices: a negative index, viewed as unsigned,
// is larger than any valid length.
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
  DeoptimizeIf(hs, instr->environment());
}
3261
3262
// Stores a value into a fast-elements backing store at a constant or
// register key, emitting a write barrier when the stored value may be a
// heap object.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // Constant keys never take the barrier path below, which needs the key
    // in a register.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
3287
3288
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003289void LCodeGen::DoStoreKeyedFastDoubleElement(
3290 LStoreKeyedFastDoubleElement* instr) {
3291 DwVfpRegister value = ToDoubleRegister(instr->value());
3292 Register elements = ToRegister(instr->elements());
3293 Register key = no_reg;
3294 Register scratch = scratch0();
3295 bool key_is_constant = instr->key()->IsConstantOperand();
3296 int constant_key = 0;
3297 Label not_nan;
3298
3299 // Calculate the effective address of the slot in the array to store the
3300 // double value.
3301 if (key_is_constant) {
3302 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3303 if (constant_key & 0xF0000000) {
3304 Abort("array index constant value too big.");
3305 }
3306 } else {
3307 key = ToRegister(instr->key());
3308 }
3309 int shift_size = ElementsKindToShiftSize(JSObject::FAST_DOUBLE_ELEMENTS);
3310 Operand operand = key_is_constant
3311 ? Operand(constant_key * (1 << shift_size) +
3312 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
3313 : Operand(key, LSL, shift_size);
3314 __ add(scratch, elements, operand);
3315 if (!key_is_constant) {
3316 __ add(scratch, scratch,
3317 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
3318 }
3319
3320 // Check for NaN. All NaNs must be canonicalized.
3321 __ VFPCompareAndSetFlags(value, value);
3322
3323 // Only load canonical NaN if the comparison above set the overflow.
3324 __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);
3325
3326 __ bind(&not_nan);
3327 __ vstr(value, scratch, 0);
3328}
3329
3330
// Stores a value into an external (typed) array.  Float/double kinds go
// through VFP registers; integer kinds use a byte-, half-, or word-sized
// store chosen by the elements kind.
void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {

  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  JSObject::ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  // Element size (log2) for address scaling, derived from the kind.
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister value(ToDoubleRegister(instr->value()));
    Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
                                    : Operand(key, LSL, shift_size));
    __ add(scratch0(), external_pointer, operand);
    if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
      // Narrow to single precision before storing into a float array.
      __ vcvt_f32_f64(double_scratch0().low(), value);
      __ vstr(double_scratch0().low(), scratch0(), 0);
    } else {  // i.e. elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS
      __ vstr(value, scratch0(), 0);
    }
  } else {
    Register value(ToRegister(instr->value()));
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (elements_kind) {
      case JSObject::EXTERNAL_PIXEL_ELEMENTS:
      case JSObject::EXTERNAL_BYTE_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ strb(value, mem_operand);
        break;
      case JSObject::EXTERNAL_SHORT_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ strh(value, mem_operand);
        break;
      case JSObject::EXTERNAL_INT_ELEMENTS:
      case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ str(value, mem_operand);
        break;
      // Non-integer and non-external kinds never reach this instruction.
      case JSObject::EXTERNAL_FLOAT_ELEMENTS:
      case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
      case JSObject::FAST_DOUBLE_ELEMENTS:
      case JSObject::FAST_ELEMENTS:
      case JSObject::DICTIONARY_ELEMENTS:
      case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
3392
3393
Ben Murdochb0fe1622011-05-05 13:52:32 +01003394void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3395 ASSERT(ToRegister(instr->object()).is(r2));
3396 ASSERT(ToRegister(instr->key()).is(r1));
3397 ASSERT(ToRegister(instr->value()).is(r0));
3398
Ben Murdoch8b112d22011-06-08 16:22:53 +01003399 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003400 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3401 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003402 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3403}
3404
3405
Ben Murdoch257744e2011-11-30 15:57:28 +00003406void LCodeGen::DoStringAdd(LStringAdd* instr) {
3407 __ push(ToRegister(instr->left()));
3408 __ push(ToRegister(instr->right()));
3409 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3410 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3411}
3412
3413
// Emits code for String.prototype.charCodeAt.  Flat sequential strings are
// handled inline (one-byte and two-byte loads); cons strings with an empty
// second part are unwrapped one level; everything else goes to deferred
// code that calls the runtime.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register scratch = scratch0();
  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  Label flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(eq, &flat_string);

  // Handle non-flat strings.
  __ tst(result, Operand(kIsConsStringMask));
  __ b(eq, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, deferred->entry());

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ ldrh(result,
            FieldMemOperand(string,
                            SeqTwoByteString::kHeaderSize + 2 * const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
    __ ldrh(result, MemOperand(scratch, index, LSL, 1));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ ldrb(result, FieldMemOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
    __ ldrb(result, MemOperand(scratch, index));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}
3518
3519
// Deferred (slow) path for DoStringCharCodeAt: calls the
// Runtime::kStringCharCodeAt function with the string and smi-tagged index,
// then untags the smi result into the instruction's result register.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  // Untag the smi result and publish it through the safepoint slot.
  __ SmiUntag(r0);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3550
3551
// Emits code for String.fromCharCode on a single int32 char code.  ASCII
// codes are served from the single-character string cache; cache misses and
// non-ASCII codes fall through to deferred runtime code.
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  // Codes above the ASCII range always take the deferred path.
  __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
  __ b(hi, deferred->entry());
  // Look the code up in the single-character string cache; an undefined
  // entry means a cache miss, handled by the deferred path.
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(result, ip);
  __ b(eq, deferred->entry());
  __ bind(deferred->exit());
}
3580
3581
// Deferred (slow) path for DoStringCharFromCode: calls the
// Runtime::kCharFromCode function with the smi-tagged char code and stores
// the resulting string through the safepoint slot of the result register.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3597
3598
Steve Block1e0659c2011-05-24 12:43:12 +01003599void LCodeGen::DoStringLength(LStringLength* instr) {
3600 Register string = ToRegister(instr->InputAt(0));
3601 Register result = ToRegister(instr->result());
3602 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3603}
3604
3605
Ben Murdochb0fe1622011-05-05 13:52:32 +01003606void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003607 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003608 ASSERT(input->IsRegister() || input->IsStackSlot());
3609 LOperand* output = instr->result();
3610 ASSERT(output->IsDoubleRegister());
3611 SwVfpRegister single_scratch = double_scratch0().low();
3612 if (input->IsStackSlot()) {
3613 Register scratch = scratch0();
3614 __ ldr(scratch, ToMemOperand(input));
3615 __ vmov(single_scratch, scratch);
3616 } else {
3617 __ vmov(single_scratch, ToRegister(input));
3618 }
3619 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003620}
3621
3622
// Tags an int32 as a smi in place.  If the value does not fit in a smi
// (SmiTag overflows), deferred code allocates a heap number instead.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  // Input and result share one register: the tag happens in place.
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  // SmiTag with SetCC: the overflow flag signals the value needs boxing.
  __ SmiTag(reg, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}
3642
3643
// Deferred (slow) path for DoNumberTagI: the integer overflowed the
// smi range, so box it in a heap number. Tries inline allocation
// first and falls back to the runtime allocator.
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  DoubleRegister dbl_scratch = double_scratch0();
  SwVfpRegister flt_scratch = dbl_scratch.low();

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  __ SmiUntag(reg);
  // Flip the sign bit to recover the original integer value (see the
  // overflow note above), then convert it to a double.
  __ eor(reg, reg, Operand(0x80000000));
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(ip, Operand(0));
  __ StoreToSafepointRegisterSlot(ip, reg);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  // The runtime call leaves the new heap number in r0.
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  // Record the boxed result in its safepoint slot so the GC sees it.
  __ StoreToSafepointRegisterSlot(reg, reg);
}
3686
3687
// Boxes an untagged double into a freshly allocated heap number.
// Allocation is attempted inline; the deferred code calls the runtime
// allocator when inline allocation fails or is disabled.
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  // Deferred slow path that allocates the heap number via the runtime.
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double value into the allocated heap number's payload.
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
}
3715
3716
// Deferred (slow) path for DoNumberTagD: allocates the heap number by
// calling the runtime, with all registers preserved at a safepoint.
// The caller (DoNumberTagD) stores the double payload afterwards.
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  // The runtime returns the new heap number in r0; record it in the
  // result register's safepoint slot.
  __ StoreToSafepointRegisterSlot(r0, reg);
}
3728
3729
// Tags an integer as a smi, in place. Only used when the compiler has
// proven the value cannot overflow the smi range, so no overflow check
// is emitted (contrast with DoNumberTagI).
void LCodeGen::DoSmiTag(LSmiTag* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ SmiTag(ToRegister(input));
}
3736
3737
// Untags a smi to an integer, in place. When needs_check() is set the
// input may be a heap object: SmiUntag with SetCC shifts the tag bit
// into the carry flag and we deoptimize if it was set.
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  if (instr->needs_check()) {
    ASSERT(kHeapObjectTag == 1);
    // If the input is a HeapObject, SmiUntag will set the carry flag.
    __ SmiUntag(ToRegister(input), SetCC);
    DeoptimizeIf(cs, instr->environment());
  } else {
    __ SmiUntag(ToRegister(input));
  }
}
3750
3751
// Converts a tagged number (smi or heap number) in |input_reg| to a
// double in |result_reg|. Non-numbers deoptimize, except that when
// |deoptimize_on_undefined| is false the value undefined converts to
// NaN instead. |input_reg| is preserved: smis are untagged and then
// retagged in place.
void LCodeGen::EmitNumberUntagD(Register input_reg,
                                DoubleRegister result_reg,
                                bool deoptimize_on_undefined,
                                LEnvironment* env) {
  Register scratch = scratch0();
  SwVfpRegister flt_scratch = double_scratch0().low();
  ASSERT(!result_reg.is(double_scratch0()));

  // NOTE(review): the |heap_number| label declared here is never bound
  // or branched to; the else-branch below shadows it with a local one.
  Label load_smi, heap_number, done;

  // Smi check.
  __ JumpIfSmi(input_reg, &load_smi);

  // Heap number map check.
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  if (deoptimize_on_undefined) {
    DeoptimizeIf(ne, env);
  } else {
    Label heap_number;
    __ b(eq, &heap_number);

    // Anything other than a heap number or undefined deoptimizes.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, env);

    // Convert undefined to NaN.
    __ LoadRoot(ip, Heap::kNanValueRootIndex);
    __ sub(ip, ip, Operand(kHeapObjectTag));
    __ vldr(result_reg, ip, HeapNumber::kValueOffset);
    __ jmp(&done);

    __ bind(&heap_number);
  }
  // Heap number to double register conversion.
  __ sub(ip, input_reg, Operand(kHeapObjectTag));
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
  __ jmp(&done);

  // Smi to double register conversion
  __ bind(&load_smi);
  __ SmiUntag(input_reg);  // Untag smi before converting to float.
  __ vmov(flt_scratch, input_reg);
  __ vcvt_f64_s32(result_reg, flt_scratch);
  __ SmiTag(input_reg);  // Retag smi.
  __ bind(&done);
}
3800
3801
// Deferred code object for LTaggedToI: generated when the optimistic
// smi untag in DoTaggedToI discovers a heap-object input.
class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};
3810
3811
// Slow path for LTaggedToI: the input turned out to be a heap object.
// First restores the optimistically untagged input, then converts a
// heap number to int32. Truncating mode uses the ECMA bitwise-op
// truncation and maps undefined to 0; non-truncating mode deoptimizes
// on anything but an exactly representable heap number (and on -0 when
// kBailoutOnMinusZero is set).
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_scratch = double_scratch0();
  SwVfpRegister single_scratch = double_scratch.low();

  ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
  ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));

  Label done;

  // The input was optimistically untagged; revert it.
  // The carry flag is set when we reach this deferred code as we just executed
  // SmiUntag(heap_object, SetCC)
  ASSERT(kHeapObjectTag == 1);
  // adc(x, x, x) == (x << 1) | carry, i.e. retag the heap object.
  __ adc(input_reg, input_reg, Operand(input_reg));

  // Heap number map check.
  __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, Operand(ip));

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
    ASSERT(!scratch3.is(input_reg) &&
           !scratch3.is(scratch1) &&
           !scratch3.is(scratch2));
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    // Load the heap number's double payload and truncate it.
    __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);

    __ EmitECMATruncate(input_reg,
                        double_scratch2,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);

  } else {
    CpuFeatures::Scope scope(VFP3);
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
    __ EmitVFPTruncate(kRoundToZero,
                       single_scratch,
                       double_scratch,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    // Deoptimize if the conversion was not exact.
    DeoptimizeIf(ne, instr->environment());
    // Load the result.
    __ vmov(input_reg, single_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result could have come from -0.0: check the sign bit of
      // the original double and deoptimize if it was negative zero.
      __ cmp(input_reg, Operand(0));
      __ b(ne, &done);
      __ vmov(scratch1, double_scratch.high());
      __ tst(scratch1, Operand(HeapNumber::kSignMask));
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}
3891
3892
// Converts a tagged value to an int32, in place. Smis are handled
// inline by optimistically untagging; heap objects branch to deferred
// code (DoDeferredTaggedToI) which first restores the tag.
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Optimistically untag the input.
  // If the input is a HeapObject, SmiUntag will set the carry flag.
  __ SmiUntag(input_reg, SetCC);
  // Branch to deferred code if the input was tagged.
  // The deferred code will take care of restoring the tag.
  __ b(cs, deferred->entry());
  __ bind(deferred->exit());
}
3910
3911
// Converts a tagged number to an untagged double by delegating to
// EmitNumberUntagD (which handles smis, heap numbers, and optionally
// undefined -> NaN).
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  DoubleRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   instr->environment());
}
3925
3926
3927void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003928 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003929 Register scratch1 = scratch0();
3930 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003931 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
3932 DwVfpRegister double_scratch = double_scratch0();
3933 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01003934
Steve Block44f0eee2011-05-26 01:26:41 +01003935 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01003936
Steve Block44f0eee2011-05-26 01:26:41 +01003937 if (instr->truncating()) {
3938 Register scratch3 = ToRegister(instr->TempAt(1));
3939 __ EmitECMATruncate(result_reg,
3940 double_input,
3941 single_scratch,
3942 scratch1,
3943 scratch2,
3944 scratch3);
3945 } else {
3946 VFPRoundingMode rounding_mode = kRoundToMinusInf;
3947 __ EmitVFPTruncate(rounding_mode,
3948 single_scratch,
3949 double_input,
3950 scratch1,
3951 scratch2,
3952 kCheckForInexactConversion);
3953 // Deoptimize if we had a vfp invalid exception,
3954 // including inexact operation.
Steve Block1e0659c2011-05-24 12:43:12 +01003955 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003956 // Retrieve the result.
3957 __ vmov(result_reg, single_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01003958 }
Steve Block44f0eee2011-05-26 01:26:41 +01003959 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003960}
3961
3962
// Deoptimizes if the input value is not a smi (smi tag bit set).
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(ne, instr->environment());
}
3968
3969
// Deoptimizes if the input value IS a smi (smi tag bit clear).
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
}
3975
3976
// Deoptimizes unless the input object's instance type satisfies the
// check recorded in the hydrogen instruction: either a [first, last]
// interval test or a mask-and-tag test on the instance type byte.
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the instance type byte from the object's map.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmp(scratch, Operand(first));

    // If there is only one type in the interval check for equality.
    if (first == last) {
      DeoptimizeIf(ne, instr->environment());
    } else {
      DeoptimizeIf(lo, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmp(scratch, Operand(last));
        DeoptimizeIf(hi, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      // Single-bit mask: a tst suffices; the expected bit value decides
      // which condition deoptimizes.
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ tst(scratch, Operand(mask));
      DeoptimizeIf(tag == 0 ? ne : eq, instr->environment());
    } else {
      __ and_(scratch, scratch, Operand(mask));
      __ cmp(scratch, Operand(tag));
      DeoptimizeIf(ne, instr->environment());
    }
  }
}
4018
4019
// Deoptimizes unless the input is the exact function object recorded
// at compile time.
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->InputAt(0)->IsRegister());
  Register reg = ToRegister(instr->InputAt(0));
  __ cmp(reg, Operand(instr->hydrogen()->target()));
  DeoptimizeIf(ne, instr->environment());
}
4026
4027
// Deoptimizes unless the input object's map equals the map recorded at
// compile time.
void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}
4037
4038
// Clamps an untagged double to the uint8 range [0, 255] using the
// macro assembler's ClampDoubleToUint8 helper.
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}
4045
4046
// Clamps an untagged 32-bit integer to the uint8 range [0, 255] using
// the macro assembler's ClampUint8 helper.
void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  Register unclamped_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampUint8(result_reg, unclamped_reg);
}
4052
4053
4054void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4055 Register scratch = scratch0();
4056 Register input_reg = ToRegister(instr->unclamped());
4057 Register result_reg = ToRegister(instr->result());
4058 DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
4059 Label is_smi, done, heap_number;
4060
4061 // Both smi and heap number cases are handled.
4062 __ JumpIfSmi(input_reg, &is_smi);
4063
4064 // Check for heap number
4065 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4066 __ cmp(scratch, Operand(factory()->heap_number_map()));
4067 __ b(eq, &heap_number);
4068
4069 // Check for undefined. Undefined is converted to zero for clamping
4070 // conversions.
4071 __ cmp(input_reg, Operand(factory()->undefined_value()));
4072 DeoptimizeIf(ne, instr->environment());
4073 __ movt(input_reg, 0);
4074 __ jmp(&done);
4075
4076 // Heap number
4077 __ bind(&heap_number);
4078 __ vldr(double_scratch0(), FieldMemOperand(input_reg,
4079 HeapNumber::kValueOffset));
4080 __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
4081 __ jmp(&done);
4082
4083 // smi
4084 __ bind(&is_smi);
4085 __ SmiUntag(result_reg, input_reg);
4086 __ ClampUint8(result_reg, result_reg);
4087
4088 __ bind(&done);
4089}
4090
4091
// Loads a handle-referenced heap object into |result|. New-space
// objects may move, so they are referenced indirectly through a global
// property cell; old-space objects are embedded directly in the code.
void LCodeGen::LoadHeapObject(Register result,
                              Handle<HeapObject> object) {
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand(cell));
    __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(result, Operand(object));
  }
}
4103
4104
// Deoptimizes if any object on the prototype chain between the known
// prototype and the holder has a map different from the one recorded
// at compile time.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}
4131
4132
// Materializes an array literal. Pushes the literals array, literal
// index, and constant elements, then calls a fast-clone stub or a
// runtime function depending on depth, length, and copy-on-write.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  // Arguments: literals array (r3), literal index (r2),
  // constant elements (r1).
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4159
4160
// Materializes an object literal by calling the appropriate runtime
// function (shallow variant when the literal has no nested literals).
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  // Arguments: literals array (r4), literal index (r3), constant
  // properties (r2), fast-elements flag as a smi (r1).
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}
4176
4177
// Transitions the object in r0 to fast properties via the runtime.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
4183
4184
// Materializes a regexp literal. The literal is created lazily by the
// runtime on first use and cached in the literals array; afterwards a
// fresh JSRegExp object is allocated and the cached one is copied into
// it (inline allocation with a runtime fallback).
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  // An undefined slot means the literal has not been created yet.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Inline allocation failed: allocate via the runtime, preserving the
  // materialized literal (r1) across the call.
  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  // Copy the trailing word if the size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
4238
4239
// Materializes a function literal (closure). Uses the fast-case stub
// for non-pretenured functions without literals; otherwise calls the
// runtime.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Runtime path: pass context, shared info, and the pretenure flag.
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
4260
4261
// Computes typeof(input) by calling the runtime.
void LCodeGen::DoTypeof(LTypeof* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}
4267
4268
// Branches on the result of a typeof comparison against a literal type
// name, using EmitTypeofIs to emit the test and compute the final
// branch condition.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}
4283
4284
// Emits the test for typeof(input) == type_name. May jump directly to
// |true_label| or |false_label|; otherwise falls through with the
// condition codes set, and returns the condition under which the test
// is true. Returns kNoCondition semantics via an unconditional jump to
// false for unknown type names. Clobbers |input|.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    // "number": smis and heap numbers.
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    // "string": string types that are not undetectable.
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // "boolean": exactly true or false.
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    // "undefined": the undefined value or any undetectable object.
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // "function": callable spec objects.
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch,
                         FIRST_CALLABLE_SPEC_OBJECT_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    // "object": null, or non-callable spec objects that are not
    // undetectable.
    __ JumpIfSmi(input, false_label);
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ b(eq, true_label);
    __ CompareObjectType(input, input, scratch,
                         FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, false_label);
    __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
4350
4351
Steve Block1e0659c2011-05-24 12:43:12 +01004352void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4353 Register temp1 = ToRegister(instr->TempAt(0));
4354 int true_block = chunk_->LookupDestination(instr->true_block_id());
4355 int false_block = chunk_->LookupDestination(instr->false_block_id());
4356
4357 EmitIsConstructCall(temp1, scratch0());
4358 EmitBranch(true_block, false_block, eq);
4359}
4360
4361
4362void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
4363 ASSERT(!temp1.is(temp2));
4364 // Get the frame pointer for the calling frame.
4365 __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4366
4367 // Skip the arguments adaptor frame if it exists.
4368 Label check_frame_marker;
4369 __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
4370 __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4371 __ b(ne, &check_frame_marker);
4372 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
4373
4374 // Check the marker in the calling frame.
4375 __ bind(&check_frame_marker);
4376 __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
4377 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
4378}
4379
4380
// Intentionally emits no code.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
4385
4386
// Unconditional deoptimization: the "al" (always) condition makes
// DeoptimizeIf emit an unconditional branch to the deopt entry.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}
4390
4391
4392void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01004393 Register object = ToRegister(instr->object());
4394 Register key = ToRegister(instr->key());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004395 Register strict = scratch0();
4396 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
4397 __ Push(object, key, strict);
Steve Block1e0659c2011-05-24 12:43:12 +01004398 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4399 LPointerMap* pointers = instr->pointer_map();
4400 LEnvironment* env = instr->deoptimization_environment();
4401 RecordPosition(pointers->position());
4402 RegisterEnvironmentForDeoptimization(env);
Ben Murdochb8e0da22011-05-16 14:20:40 +01004403 SafepointGenerator safepoint_generator(this,
Steve Block1e0659c2011-05-24 12:43:12 +01004404 pointers,
4405 env->deoptimization_index());
Ben Murdoch257744e2011-11-30 15:57:28 +00004406 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4407}
4408
4409
4410void LCodeGen::DoIn(LIn* instr) {
4411 Register obj = ToRegister(instr->object());
4412 Register key = ToRegister(instr->key());
4413 __ Push(key, obj);
4414 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4415 LPointerMap* pointers = instr->pointer_map();
4416 LEnvironment* env = instr->deoptimization_environment();
4417 RecordPosition(pointers->position());
4418 RegisterEnvironmentForDeoptimization(env);
4419 SafepointGenerator safepoint_generator(this,
4420 pointers,
4421 env->deoptimization_index());
4422 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004423}
4424
4425
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004426void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
4427 {
4428 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4429 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
4430 RegisterLazyDeoptimization(
4431 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
4432 }
4433
4434 // The gap code includes the restoring of the safepoint registers.
4435 int pc = masm()->pc_offset();
4436 safepoints_.SetPcAfterGap(pc);
4437}
4438
4439
Ben Murdochb0fe1622011-05-05 13:52:32 +01004440void LCodeGen::DoStackCheck(LStackCheck* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004441 class DeferredStackCheck: public LDeferredCode {
4442 public:
4443 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4444 : LDeferredCode(codegen), instr_(instr) { }
4445 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4446 private:
4447 LStackCheck* instr_;
4448 };
4449
4450 if (instr->hydrogen()->is_function_entry()) {
4451 // Perform stack overflow check.
4452 Label done;
4453 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4454 __ cmp(sp, Operand(ip));
4455 __ b(hs, &done);
4456 StackCheckStub stub;
4457 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4458 __ bind(&done);
4459 } else {
4460 ASSERT(instr->hydrogen()->is_backwards_branch());
4461 // Perform stack overflow check if this goto needs it before jumping.
4462 DeferredStackCheck* deferred_stack_check =
4463 new DeferredStackCheck(this, instr);
4464 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4465 __ cmp(sp, Operand(ip));
4466 __ b(lo, deferred_stack_check->entry());
4467 __ bind(instr->done_label());
4468 deferred_stack_check->SetExit(instr->done_label());
4469 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004470}
4471
4472
4473void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004474 // This is a pseudo-instruction that ensures that the environment here is
4475 // properly registered for deoptimization and records the assembler's PC
4476 // offset.
4477 LEnvironment* environment = instr->environment();
4478 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4479 instr->SpilledDoubleRegisterArray());
4480
4481 // If the environment were already registered, we would have no way of
4482 // backpatching it with the spill slot operands.
4483 ASSERT(!environment->HasBeenRegistered());
4484 RegisterEnvironmentForDeoptimization(environment);
4485 ASSERT(osr_pc_offset_ == -1);
4486 osr_pc_offset_ = masm()->pc_offset();
Ben Murdochb0fe1622011-05-05 13:52:32 +01004487}
4488
4489
Ben Murdoch257744e2011-11-30 15:57:28 +00004490
4491
Ben Murdochb0fe1622011-05-05 13:52:32 +01004492#undef __
4493
4494} } // namespace v8::internal