blob: f5d744914977f3d90a008c1f7f6bc42c06a0ebc2 [file] [log] [blame]
Ben Murdochb8e0da22011-05-16 14:20:40 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Steve Block44f0eee2011-05-26 01:26:41 +010028#include "v8.h"
29
Ben Murdochb0fe1622011-05-05 13:52:32 +010030#include "arm/lithium-codegen-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +010031#include "arm/lithium-gap-resolver-arm.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "code-stubs.h"
33#include "stub-cache.h"
34
35namespace v8 {
36namespace internal {
37
38
// Call wrapper that (a) pads the instruction stream before a call so the
// deoptimizer's patch region for the previous safepoint cannot overlap the
// call, and (b) records a safepoint for the given pointer map after the
// call returns.
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  // Invoked just before the call; |call_size| is the size in bytes of the
  // call sequence about to be emitted.
  virtual void BeforeCall(int call_size) const {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the generated code there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end =
        codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
    if (call_end < prev_jump_end) {
      // Emit nops until the end of the call clears the previous patch region.
      int padding_size = prev_jump_end - call_end;
      ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
      while (padding_size > 0) {
        codegen_->masm()->nop();
        padding_size -= Assembler::kInstrSize;
      }
    }
  }

  // Invoked right after the call: record the safepoint at the return address.
  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;         // Not owned.
  LPointerMap* pointers_;     // Live tagged values at the call site. Not owned.
  int deoptimization_index_;  // Index recorded with the safepoint.
};
75
76
77#define __ masm()->
78
79bool LCodeGen::GenerateCode() {
80 HPhase phase("Code generation", chunk());
81 ASSERT(is_unused());
82 status_ = GENERATING;
83 CpuFeatures::Scope scope1(VFP3);
84 CpuFeatures::Scope scope2(ARMv7);
85 return GeneratePrologue() &&
86 GenerateBody() &&
87 GenerateDeferredCode() &&
Ben Murdoch257744e2011-11-30 15:57:28 +000088 GenerateDeoptJumpTable() &&
Ben Murdochb0fe1622011-05-05 13:52:32 +010089 GenerateSafepointTable();
90}
91
92
// Finalizes the generated Code object: stores frame/safepoint metadata,
// attaches the deoptimization data, and reserves reloc space for lazy
// deoptimization patching.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
100
101
// Aborts optimized code generation with a printf-style reason. Optionally
// traces the bailout (function name + formatted message) and marks the
// generator as ABORTED so later phases are skipped.
void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
115
116
// Emits a printf-style code comment into the assembler stream when
// --code-comments is enabled. No-op otherwise.
void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}
133
134
// Emits the function prologue: optional receiver fixup, frame setup,
// stack-slot reservation, optional local context allocation, and the
// --trace entry hook. Returns false if generation was aborted.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // Allow breaking into a specific function with --stop_at.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // Register state on entry:
  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). r5 is zero for method calls and non-zero for
  // function calls.
  if (info_->is_strict_mode() || info_->is_native()) {
    Label ok;
    __ cmp(r5, Operand(0));
    __ b(eq, &ok);
    // The receiver sits just above the parameters on the caller's stack.
    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Build the standard frame: push function, context, caller fp, return pc.
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Fill every reserved slot with a recognizable zap value so use of
      // an uninitialized slot is easy to spot in the debugger.
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both r0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(var->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
227
228
229bool LCodeGen::GenerateBody() {
230 ASSERT(is_generating());
231 bool emit_instructions = true;
232 for (current_instruction_ = 0;
233 !is_aborted() && current_instruction_ < instructions_->length();
234 current_instruction_++) {
235 LInstruction* instr = instructions_->at(current_instruction_);
236 if (instr->IsLabel()) {
237 LLabel* label = LLabel::cast(instr);
238 emit_instructions = !label->HasReplacement();
239 }
240
241 if (emit_instructions) {
242 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
243 instr->CompileToNative(this);
244 }
245 }
246 return !is_aborted();
247}
248
249
250LInstruction* LCodeGen::GetNextInstruction() {
251 if (current_instruction_ < instructions_->length() - 1) {
252 return instructions_->at(current_instruction_ + 1);
253 } else {
254 return NULL;
255 }
256}
257
258
// Emits all deferred code stubs collected during the body pass. Each stub
// is bound at its entry label, generated, and jumps back to its exit.
bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      code->Generate();
      __ jmp(code->exit());
    }

    // Pad code to ensure that the last piece of deferred code have
    // room for lazy bailout.
    while ((masm()->pc_offset() - LastSafepointEnd())
           < Deoptimizer::patch_size()) {
      __ nop();
    }
  }

  // Force constant pool emission at the end of the deferred code to make
  // sure that no constant pools are emitted after.
  masm()->CheckConstPool(true, false);

  return !is_aborted();
}
283
284
// Emits the deoptimization jump table at the very end of the code: one
// pc-relative load per entry that jumps to an inlined 32-bit target
// address stored directly after the load instruction.
bool LCodeGen::GenerateDeoptJumpTable() {
  // Check that the jump table is accessible from everywhere in the function
  // code, ie that offsets to the table can be encoded in the 24bit signed
  // immediate of a branch instruction.
  // To simplify we consider the code size from the first instruction to the
  // end of the jump table. We also don't consider the pc load delta.
  // Each entry in the jump table generates one instruction and inlines one
  // 32bit data after it.
  if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
      deopt_jump_table_.length() * 2)) {
    Abort("Generated code is too large");
  }

  // Block the constant pool emission during the jump table emission.
  __ BlockConstPoolFor(deopt_jump_table_.length());
  __ RecordComment("[ Deoptimisation jump table");
  Label table_start;
  __ bind(&table_start);
  for (int i = 0; i < deopt_jump_table_.length(); i++) {
    __ bind(&deopt_jump_table_[i].label);
    // Load pc from the word emitted immediately after this instruction.
    __ ldr(pc, MemOperand(pc, Assembler::kInstrSize - Assembler::kPcLoadDelta));
    __ dd(reinterpret_cast<uint32_t>(deopt_jump_table_[i].address));
  }
  ASSERT(masm()->InstructionsGeneratedSince(&table_start) ==
      deopt_jump_table_.length() * 2);
  __ RecordComment("]");

  // The deoptimization jump table is the last part of the instruction
  // sequence. Mark the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
317
318
// Emits the safepoint table after all code has been generated.
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
324
325
// Maps a register-allocator index to the corresponding ARM core register.
Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}
329
330
// Maps a register-allocator index to the corresponding VFP double register.
DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}
334
335
// Returns the core register assigned to |op|, which must be a register
// operand.
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}
340
341
342Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
343 if (op->IsRegister()) {
344 return ToRegister(op->index());
345 } else if (op->IsConstantOperand()) {
346 __ mov(scratch, ToOperand(op));
347 return scratch;
348 } else if (op->IsStackSlot() || op->IsArgument()) {
349 __ ldr(scratch, ToMemOperand(op));
350 return scratch;
351 }
352 UNREACHABLE();
353 return scratch;
354}
355
356
// Returns the double register assigned to |op|, which must be a double
// register operand.
DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}
361
362
// Materializes |op| in a double register. Double-register operands are
// returned directly; integer constants are converted via the VFP scratch
// registers; stack/argument slots are loaded from memory. Double and
// tagged immediates are unsupported and abort generation.
DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      // int32 -> single-precision bits -> converted to double.
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}
393
394
395int LCodeGen::ToInteger32(LConstantOperand* op) const {
396 Handle<Object> value = chunk_->LookupLiteral(op);
397 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
398 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
399 value->Number());
400 return static_cast<int32_t>(value->Number());
401}
402
403
// Converts an LOperand to an assembler Operand. Supports integer and
// tagged constants and registers; double immediates, double registers and
// stack slots are unsupported here (stack slots use ToMemOperand instead).
Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}
427
428
429MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100430 ASSERT(!op->IsRegister());
431 ASSERT(!op->IsDoubleRegister());
432 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
433 int index = op->index();
434 if (index >= 0) {
435 // Local or spill slot. Skip the frame pointer, function, and
436 // context in the fixed part of the frame.
437 return MemOperand(fp, -(index + 3) * kPointerSize);
438 } else {
439 // Incoming parameter. Skip the return address.
440 return MemOperand(fp, -(index - 1) * kPointerSize);
441 }
442}
443
444
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100445MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
446 ASSERT(op->IsDoubleStackSlot());
447 int index = op->index();
448 if (index >= 0) {
449 // Local or spill slot. Skip the frame pointer, function, context,
450 // and the first word of the double in the fixed part of the frame.
451 return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
452 } else {
453 // Incoming parameter. Skip the return address and the first word of
454 // the double.
455 return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
456 }
457}
458
459
// Recursively writes a deoptimization translation for |environment| and
// all of its outer environments (outermost first). Each environment value
// becomes one translation command; values that live in spill slots are
// additionally recorded as duplicates at their spilled location.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  // Outer frames are written first so the translation is outermost-in.
  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
497
498
// Appends one translation command describing where |op|'s value lives
// (stack slot, register, double register, or literal) and whether it is
// tagged. A NULL operand stands for the not-yet-materialized arguments
// object.
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    // Arguments are addressed past the normal stack slots.
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
537
538
// Calls a code object with a simple safepoint record (no registers saved).
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}
544
545
// Calls a code object, records the source position, and registers the
// lazy-deoptimization safepoint according to |safepoint_mode|.
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}
563
564
// Calls a runtime function and registers a simple lazy-deoptimization
// safepoint for the instruction.
void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}
576
577
// Calls a runtime function from deferred code. Doubles are saved around
// the call and a registers-safepoint (without deopt index) is recorded.
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}
585
586
587void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
588 SafepointMode safepoint_mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100589 // Create the environment to bailout to. If the call has side effects
590 // execution has to continue after the call otherwise execution can continue
591 // from a previous bailout point repeating the call.
592 LEnvironment* deoptimization_environment;
593 if (instr->HasDeoptimizationEnvironment()) {
594 deoptimization_environment = instr->deoptimization_environment();
595 } else {
596 deoptimization_environment = instr->environment();
597 }
598
599 RegisterEnvironmentForDeoptimization(deoptimization_environment);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100600 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
601 RecordSafepoint(instr->pointer_map(),
602 deoptimization_environment->deoptimization_index());
603 } else {
604 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
605 RecordSafepointWithRegisters(
606 instr->pointer_map(),
607 0,
608 deoptimization_environment->deoptimization_index());
609 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100610}
611
612
// Assigns a deoptimization index to |environment| (once) and writes its
// translation, including all outer frames.
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    // Count this frame plus all inlined (outer) frames.
    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}
639
640
// Emits a (possibly conditional) jump to the deoptimizer entry for
// |environment|. Unconditional deopts jump directly; conditional ones
// branch through the shared deopt jump table, reusing the previous table
// entry when it targets the same address.
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (FLAG_trap_on_deopt) __ stop("trap_on_deopt", cc);

  if (cc == al) {
    // Unconditional deopt: jump straight to the entry.
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (deopt_jump_table_.is_empty() ||
        (deopt_jump_table_.last().address != entry)) {
      deopt_jump_table_.Add(JumpTableEntry(entry));
    }
    __ b(cc, &deopt_jump_table_.last().label);
  }
}
674
675
// Builds the DeoptimizationInputData for |code| from the registered
// environments: translation byte array, literal array, OSR info, and one
// entry (ast id, translation index, arguments height) per deopt point.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}
707
708
709int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
710 int result = deoptimization_literals_.length();
711 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
712 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
713 }
714 deoptimization_literals_.Add(literal);
715 return result;
716}
717
718
719void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
720 ASSERT(deoptimization_literals_.length() == 0);
721
722 const ZoneList<Handle<JSFunction> >* inlined_closures =
723 chunk()->inlined_closures();
724
725 for (int i = 0, length = inlined_closures->length();
726 i < length;
727 i++) {
728 DefineDeoptimizationLiteral(inlined_closures->at(i));
729 }
730
731 inlined_function_count_ = deoptimization_literals_.length();
732}
733
734
// Core safepoint recorder: defines a safepoint of the given kind and
// registers every live tagged stack slot (and, for register safepoints,
// every live tagged register plus cp, which always holds the context).
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}
758
759
// Convenience wrapper: simple safepoint with no pushed arguments.
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}
764
765
// Convenience wrapper: simple safepoint with an empty pointer map.
void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}
770
771
// Convenience wrapper: safepoint that also records live core registers.
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
      deoptimization_index);
}
778
779
// Convenience wrapper: safepoint that records live core AND double
// registers.
void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
      deoptimization_index);
}
787
788
// Records a source position with the assembler, ignoring the "no
// position" sentinel.
void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}
793
794
// Binds a basic-block label, emits a block comment, updates the current
// block id, and processes the label's gap moves.
void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}
805
806
// Emits code for a parallel move by delegating to the gap resolver.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
810
811
// Emits all parallel moves of a gap, in inner-position order. If the next
// instruction is a lazy bailout, the safepoint table is told the pc after
// the gap so the bailout patches the right location.
void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}
827
828
// An instruction gap is emitted exactly like a block-boundary gap.
void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}
832
833
// No code is emitted for parameters.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
837
838
// Calls the code stub selected by the hydrogen instruction's major key.
// All of these stubs return their result in r0.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // Load the stub's argument from the top of the stack into r0.
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
883
884
// No code is emitted for unknown OSR (on-stack replacement) values.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
888
889
890void LCodeGen::DoModI(LModI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +0100891 if (instr->hydrogen()->HasPowerOf2Divisor()) {
892 Register dividend = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000893 Register result = ToRegister(instr->result());
Steve Block44f0eee2011-05-26 01:26:41 +0100894
895 int32_t divisor =
896 HConstant::cast(instr->hydrogen()->right())->Integer32Value();
897
898 if (divisor < 0) divisor = -divisor;
899
900 Label positive_dividend, done;
901 __ cmp(dividend, Operand(0));
902 __ b(pl, &positive_dividend);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000903 __ rsb(result, dividend, Operand(0));
904 __ and_(result, result, Operand(divisor - 1), SetCC);
Steve Block44f0eee2011-05-26 01:26:41 +0100905 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000906 DeoptimizeIf(eq, instr->environment());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100907 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000908 __ rsb(result, result, Operand(0));
909 __ b(&done);
Steve Block44f0eee2011-05-26 01:26:41 +0100910 __ bind(&positive_dividend);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000911 __ and_(result, dividend, Operand(divisor - 1));
Steve Block44f0eee2011-05-26 01:26:41 +0100912 __ bind(&done);
913 return;
914 }
915
Ben Murdochb8e0da22011-05-16 14:20:40 +0100916 // These registers hold untagged 32 bit values.
Steve Block1e0659c2011-05-24 12:43:12 +0100917 Register left = ToRegister(instr->InputAt(0));
918 Register right = ToRegister(instr->InputAt(1));
Ben Murdochb8e0da22011-05-16 14:20:40 +0100919 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100920
Steve Block44f0eee2011-05-26 01:26:41 +0100921 Register scratch = scratch0();
922 Register scratch2 = ToRegister(instr->TempAt(0));
923 DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
924 DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
925 DwVfpRegister quotient = double_scratch0();
926
Steve Block44f0eee2011-05-26 01:26:41 +0100927 ASSERT(!dividend.is(divisor));
928 ASSERT(!dividend.is(quotient));
929 ASSERT(!divisor.is(quotient));
930 ASSERT(!scratch.is(left));
931 ASSERT(!scratch.is(right));
932 ASSERT(!scratch.is(result));
933
934 Label done, vfp_modulo, both_positive, right_negative;
935
Ben Murdochb8e0da22011-05-16 14:20:40 +0100936 // Check for x % 0.
937 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +0100938 __ cmp(right, Operand(0));
939 DeoptimizeIf(eq, instr->environment());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100940 }
941
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000942 __ Move(result, left);
943
Steve Block44f0eee2011-05-26 01:26:41 +0100944 // (0 % x) must yield 0 (if x is finite, which is the case here).
Steve Block1e0659c2011-05-24 12:43:12 +0100945 __ cmp(left, Operand(0));
Steve Block44f0eee2011-05-26 01:26:41 +0100946 __ b(eq, &done);
947 // Preload right in a vfp register.
948 __ vmov(divisor.low(), right);
949 __ b(lt, &vfp_modulo);
950
951 __ cmp(left, Operand(right));
952 __ b(lt, &done);
953
954 // Check for (positive) power of two on the right hand side.
955 __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
956 scratch,
957 &right_negative,
958 &both_positive);
959 // Perform modulo operation (scratch contains right - 1).
960 __ and_(result, scratch, Operand(left));
961 __ b(&done);
962
963 __ bind(&right_negative);
964 // Negate right. The sign of the divisor does not matter.
965 __ rsb(right, right, Operand(0));
966
967 __ bind(&both_positive);
968 const int kUnfolds = 3;
Steve Block1e0659c2011-05-24 12:43:12 +0100969 // If the right hand side is smaller than the (nonnegative)
Steve Block44f0eee2011-05-26 01:26:41 +0100970 // left hand side, the left hand side is the result.
971 // Else try a few subtractions of the left hand side.
Steve Block1e0659c2011-05-24 12:43:12 +0100972 __ mov(scratch, left);
973 for (int i = 0; i < kUnfolds; i++) {
974 // Check if the left hand side is less or equal than the
975 // the right hand side.
Steve Block44f0eee2011-05-26 01:26:41 +0100976 __ cmp(scratch, Operand(right));
Steve Block1e0659c2011-05-24 12:43:12 +0100977 __ mov(result, scratch, LeaveCC, lt);
978 __ b(lt, &done);
979 // If not, reduce the left hand side by the right hand
980 // side and check again.
981 if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
982 }
983
Steve Block44f0eee2011-05-26 01:26:41 +0100984 __ bind(&vfp_modulo);
985 // Load the arguments in VFP registers.
986 // The divisor value is preloaded before. Be careful that 'right' is only live
987 // on entry.
988 __ vmov(dividend.low(), left);
989 // From here on don't use right as it may have been reallocated (for example
990 // to scratch2).
991 right = no_reg;
Steve Block1e0659c2011-05-24 12:43:12 +0100992
Steve Block44f0eee2011-05-26 01:26:41 +0100993 __ vcvt_f64_s32(dividend, dividend.low());
994 __ vcvt_f64_s32(divisor, divisor.low());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100995
Steve Block44f0eee2011-05-26 01:26:41 +0100996 // We do not care about the sign of the divisor.
997 __ vabs(divisor, divisor);
998 // Compute the quotient and round it to a 32bit integer.
999 __ vdiv(quotient, dividend, divisor);
1000 __ vcvt_s32_f64(quotient.low(), quotient);
1001 __ vcvt_f64_s32(quotient, quotient.low());
Ben Murdochb8e0da22011-05-16 14:20:40 +01001002
Steve Block44f0eee2011-05-26 01:26:41 +01001003 // Compute the remainder in result.
1004 DwVfpRegister double_scratch = dividend;
1005 __ vmul(double_scratch, divisor, quotient);
1006 __ vcvt_s32_f64(double_scratch.low(), double_scratch);
1007 __ vmov(scratch, double_scratch.low());
Ben Murdochb8e0da22011-05-16 14:20:40 +01001008
Steve Block44f0eee2011-05-26 01:26:41 +01001009 if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1010 __ sub(result, left, scratch);
1011 } else {
1012 Label ok;
1013 // Check for -0.
1014 __ sub(scratch2, left, scratch, SetCC);
1015 __ b(ne, &ok);
1016 __ cmp(left, Operand(0));
1017 DeoptimizeIf(mi, instr->environment());
1018 __ bind(&ok);
1019 // Load the result and we are done.
1020 __ mov(result, scratch2);
1021 }
1022
Ben Murdochb8e0da22011-05-16 14:20:40 +01001023 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001024}
1025
1026
1027void LCodeGen::DoDivI(LDivI* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001028 class DeferredDivI: public LDeferredCode {
1029 public:
1030 DeferredDivI(LCodeGen* codegen, LDivI* instr)
1031 : LDeferredCode(codegen), instr_(instr) { }
1032 virtual void Generate() {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001033 codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001034 }
1035 private:
1036 LDivI* instr_;
1037 };
1038
Steve Block1e0659c2011-05-24 12:43:12 +01001039 const Register left = ToRegister(instr->InputAt(0));
1040 const Register right = ToRegister(instr->InputAt(1));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001041 const Register scratch = scratch0();
1042 const Register result = ToRegister(instr->result());
1043
1044 // Check for x / 0.
1045 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +01001046 __ cmp(right, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001047 DeoptimizeIf(eq, instr->environment());
1048 }
1049
1050 // Check for (0 / -x) that will produce negative zero.
1051 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1052 Label left_not_zero;
Steve Block44f0eee2011-05-26 01:26:41 +01001053 __ cmp(left, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001054 __ b(ne, &left_not_zero);
Steve Block44f0eee2011-05-26 01:26:41 +01001055 __ cmp(right, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001056 DeoptimizeIf(mi, instr->environment());
1057 __ bind(&left_not_zero);
1058 }
1059
1060 // Check for (-kMinInt / -1).
1061 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1062 Label left_not_min_int;
1063 __ cmp(left, Operand(kMinInt));
1064 __ b(ne, &left_not_min_int);
1065 __ cmp(right, Operand(-1));
1066 DeoptimizeIf(eq, instr->environment());
1067 __ bind(&left_not_min_int);
1068 }
1069
1070 Label done, deoptimize;
1071 // Test for a few common cases first.
1072 __ cmp(right, Operand(1));
1073 __ mov(result, left, LeaveCC, eq);
1074 __ b(eq, &done);
1075
1076 __ cmp(right, Operand(2));
1077 __ tst(left, Operand(1), eq);
1078 __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
1079 __ b(eq, &done);
1080
1081 __ cmp(right, Operand(4));
1082 __ tst(left, Operand(3), eq);
1083 __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
1084 __ b(eq, &done);
1085
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001086 // Call the stub. The numbers in r0 and r1 have
Ben Murdochb8e0da22011-05-16 14:20:40 +01001087 // to be tagged to Smis. If that is not possible, deoptimize.
1088 DeferredDivI* deferred = new DeferredDivI(this, instr);
1089
1090 __ TrySmiTag(left, &deoptimize, scratch);
1091 __ TrySmiTag(right, &deoptimize, scratch);
1092
1093 __ b(al, deferred->entry());
1094 __ bind(deferred->exit());
1095
1096 // If the result in r0 is a Smi, untag it, else deoptimize.
Steve Block1e0659c2011-05-24 12:43:12 +01001097 __ JumpIfNotSmi(result, &deoptimize);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001098 __ SmiUntag(result);
1099 __ b(&done);
1100
1101 __ bind(&deoptimize);
1102 DeoptimizeIf(al, instr->environment());
1103 __ bind(&done);
1104}
1105
1106
// Deferred slow path shared by binary integer operations: shuffles the
// operands into the calling convention of BinaryOpStub (left in r1,
// right in r0), calls the stub inside a safepoint-registers scope, and
// stores the stub's result back into r0's safepoint slot.
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call. The four cases
  // below cover every possible aliasing of left/right with r0/r1.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  BinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
1136
1137
1138void LCodeGen::DoMulI(LMulI* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001139 Register scratch = scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001140 Register result = ToRegister(instr->result());
1141 // Note that result may alias left.
Steve Block1e0659c2011-05-24 12:43:12 +01001142 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001143 LOperand* right_op = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001144
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001145 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1146 bool bailout_on_minus_zero =
1147 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001148
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001149 if (right_op->IsConstantOperand() && !can_overflow) {
1150 // Use optimized code for specific constants.
1151 int32_t constant = ToInteger32(LConstantOperand::cast(right_op));
1152
1153 if (bailout_on_minus_zero && (constant < 0)) {
1154 // The case of a null constant will be handled separately.
1155 // If constant is negative and left is null, the result should be -0.
1156 __ cmp(left, Operand(0));
1157 DeoptimizeIf(eq, instr->environment());
1158 }
1159
1160 switch (constant) {
1161 case -1:
1162 __ rsb(result, left, Operand(0));
1163 break;
1164 case 0:
1165 if (bailout_on_minus_zero) {
1166 // If left is strictly negative and the constant is null, the
1167 // result is -0. Deoptimize if required, otherwise return 0.
1168 __ cmp(left, Operand(0));
1169 DeoptimizeIf(mi, instr->environment());
1170 }
1171 __ mov(result, Operand(0));
1172 break;
1173 case 1:
1174 __ Move(result, left);
1175 break;
1176 default:
1177 // Multiplying by powers of two and powers of two plus or minus
1178 // one can be done faster with shifted operands.
1179 // For other constants we emit standard code.
1180 int32_t mask = constant >> 31;
1181 uint32_t constant_abs = (constant + mask) ^ mask;
1182
1183 if (IsPowerOf2(constant_abs) ||
1184 IsPowerOf2(constant_abs - 1) ||
1185 IsPowerOf2(constant_abs + 1)) {
1186 if (IsPowerOf2(constant_abs)) {
1187 int32_t shift = WhichPowerOf2(constant_abs);
1188 __ mov(result, Operand(left, LSL, shift));
1189 } else if (IsPowerOf2(constant_abs - 1)) {
1190 int32_t shift = WhichPowerOf2(constant_abs - 1);
1191 __ add(result, left, Operand(left, LSL, shift));
1192 } else if (IsPowerOf2(constant_abs + 1)) {
1193 int32_t shift = WhichPowerOf2(constant_abs + 1);
1194 __ rsb(result, left, Operand(left, LSL, shift));
1195 }
1196
1197 // Correct the sign of the result is the constant is negative.
1198 if (constant < 0) __ rsb(result, result, Operand(0));
1199
1200 } else {
1201 // Generate standard code.
1202 __ mov(ip, Operand(constant));
1203 __ mul(result, left, ip);
1204 }
1205 }
1206
Ben Murdochb0fe1622011-05-05 13:52:32 +01001207 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001208 Register right = EmitLoadRegister(right_op, scratch);
1209 if (bailout_on_minus_zero) {
1210 __ orr(ToRegister(instr->TempAt(0)), left, right);
1211 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001212
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001213 if (can_overflow) {
1214 // scratch:result = left * right.
1215 __ smull(result, scratch, left, right);
1216 __ cmp(scratch, Operand(result, ASR, 31));
1217 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001218 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001219 __ mul(result, left, right);
1220 }
1221
1222 if (bailout_on_minus_zero) {
1223 // Bail out if the result is supposed to be negative zero.
1224 Label done;
1225 __ cmp(result, Operand(0));
1226 __ b(ne, &done);
Steve Block1e0659c2011-05-24 12:43:12 +01001227 __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001228 DeoptimizeIf(mi, instr->environment());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001229 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001230 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001231 }
1232}
1233
1234
1235void LCodeGen::DoBitI(LBitI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001236 LOperand* left_op = instr->InputAt(0);
1237 LOperand* right_op = instr->InputAt(1);
1238 ASSERT(left_op->IsRegister());
1239 Register left = ToRegister(left_op);
1240 Register result = ToRegister(instr->result());
1241 Operand right(no_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01001242
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001243 if (right_op->IsStackSlot() || right_op->IsArgument()) {
1244 right = Operand(EmitLoadRegister(right_op, ip));
Steve Block44f0eee2011-05-26 01:26:41 +01001245 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001246 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
1247 right = ToOperand(right_op);
Steve Block44f0eee2011-05-26 01:26:41 +01001248 }
1249
Ben Murdochb0fe1622011-05-05 13:52:32 +01001250 switch (instr->op()) {
1251 case Token::BIT_AND:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001252 __ and_(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001253 break;
1254 case Token::BIT_OR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001255 __ orr(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001256 break;
1257 case Token::BIT_XOR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001258 __ eor(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001259 break;
1260 default:
1261 UNREACHABLE();
1262 break;
1263 }
1264}
1265
1266
// Emits an integer shift (SAR/SHR/SHL). Register shift amounts are
// masked to 0-31; constant shift amounts are masked at compile time.
// SHR may deoptimize when the unsigned result has the sign bit set and
// thus cannot be represented as a signed int32.
void LCodeGen::DoShiftI(LShiftI* instr) {
  // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
  // result may alias either of them.
  LOperand* right_op = instr->InputAt(1);
  Register left = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  if (right_op->IsRegister()) {
    // Mask the right_op operand.
    __ and_(scratch, ToRegister(right_op), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(left, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          // SetCC so a negative (untaggable) result triggers the deopt.
          __ mov(result, Operand(left, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(left, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(left, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Mask the right_op operand.
    int value = ToInteger32(LConstantOperand::cast(right_op));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, ASR, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSR, shift_count));
        } else {
          if (instr->can_deopt()) {
            // Zero shift: deoptimize if the value has the sign bit set,
            // since the unsigned result would not fit in a signed int32.
            __ tst(left, Operand(0x80000000));
            DeoptimizeIf(ne, instr->environment());
          }
          __ Move(result, left);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSL, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1332
1333
1334void LCodeGen::DoSubI(LSubI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01001335 LOperand* left = instr->InputAt(0);
1336 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001337 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001338 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1339 SBit set_cond = can_overflow ? SetCC : LeaveCC;
1340
1341 if (right->IsStackSlot() || right->IsArgument()) {
1342 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001343 __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001344 } else {
1345 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001346 __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001347 }
1348
1349 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001350 DeoptimizeIf(vs, instr->environment());
1351 }
1352}
1353
1354
1355void LCodeGen::DoConstantI(LConstantI* instr) {
1356 ASSERT(instr->result()->IsRegister());
1357 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1358}
1359
1360
1361void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001362 ASSERT(instr->result()->IsDoubleRegister());
1363 DwVfpRegister result = ToDoubleRegister(instr->result());
1364 double v = instr->value();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001365 __ Vmov(result, v);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001366}
1367
1368
// Materializes a tagged constant into the result register.
void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}
1373
1374
// Loads the length field of a JSArray into the result register.
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001380
Ben Murdochb0fe1622011-05-05 13:52:32 +01001381
// Loads the length field of a FixedArrayBase into the result register.
void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}
1387
1388
// Extracts the elements kind of an object from its map's bit field 2.
void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ ldr(result, FieldMemOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
}
1401
1402
// Implements ValueOf: for a JSValue wrapper object load the wrapped
// value; for a smi or any other object return the input unchanged.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ Move(result, input, eq);  // Move predicated on the smi check.
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ Move(result, input, ne);  // Move predicated on the type check.
  __ b(ne, &done);
  // Otherwise load the wrapped value out of the JSValue.
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1422
1423
1424void LCodeGen::DoBitNotI(LBitNotI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001425 Register input = ToRegister(instr->InputAt(0));
1426 Register result = ToRegister(instr->result());
1427 __ mvn(result, Operand(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001428}
1429
1430
// Pushes the value to throw and calls the Throw runtime function.
// Control does not return here (the trailing stop is only emitted in
// debug code to catch that invariant).
void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}
1440
1441
1442void LCodeGen::DoAddI(LAddI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001443 LOperand* left = instr->InputAt(0);
1444 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001445 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001446 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1447 SBit set_cond = can_overflow ? SetCC : LeaveCC;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001448
Steve Block44f0eee2011-05-26 01:26:41 +01001449 if (right->IsStackSlot() || right->IsArgument()) {
1450 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001451 __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001452 } else {
1453 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001454 __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001455 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001456
Steve Block44f0eee2011-05-26 01:26:41 +01001457 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001458 DeoptimizeIf(vs, instr->environment());
1459 }
1460}
1461
1462
1463void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001464 DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
1465 DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001466 DoubleRegister result = ToDoubleRegister(instr->result());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001467 switch (instr->op()) {
1468 case Token::ADD:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001469 __ vadd(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001470 break;
1471 case Token::SUB:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001472 __ vsub(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001473 break;
1474 case Token::MUL:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001475 __ vmul(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001476 break;
1477 case Token::DIV:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001478 __ vdiv(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001479 break;
1480 case Token::MOD: {
Steve Block1e0659c2011-05-24 12:43:12 +01001481 // Save r0-r3 on the stack.
1482 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
1483
Ben Murdoch257744e2011-11-30 15:57:28 +00001484 __ PrepareCallCFunction(0, 2, scratch0());
1485 __ SetCallCDoubleArguments(left, right);
Steve Block44f0eee2011-05-26 01:26:41 +01001486 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00001487 ExternalReference::double_fp_operation(Token::MOD, isolate()),
1488 0, 2);
Steve Block1e0659c2011-05-24 12:43:12 +01001489 // Move the result in the double result register.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001490 __ GetCFunctionDoubleResult(result);
Steve Block1e0659c2011-05-24 12:43:12 +01001491
1492 // Restore r0-r3.
1493 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001494 break;
1495 }
1496 default:
1497 UNREACHABLE();
1498 break;
1499 }
1500}
1501
1502
1503void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001504 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
1505 ASSERT(ToRegister(instr->InputAt(1)).is(r0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001506 ASSERT(ToRegister(instr->result()).is(r0));
1507
Ben Murdoch257744e2011-11-30 15:57:28 +00001508 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001509 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdoch18a6f572011-07-25 17:16:09 +01001510 __ nop(); // Signals no inlined code.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001511}
1512
1513
1514int LCodeGen::GetNextEmittedBlock(int block) {
1515 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1516 LLabel* label = chunk_->GetLabel(i);
1517 if (!label->HasReplacement()) return i;
1518 }
1519 return -1;
1520}
1521
1522
// Emits a two-way branch: control goes to 'left_block' when 'cc' holds
// and to 'right_block' otherwise. Whenever one of the targets is the
// next block to be emitted, a fall-through is used instead of a branch.
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // Fall through to the left block; branch only when cc fails.
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // Fall through to the right block when cc fails.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}
1539
1540
1541void LCodeGen::DoBranch(LBranch* instr) {
1542 int true_block = chunk_->LookupDestination(instr->true_block_id());
1543 int false_block = chunk_->LookupDestination(instr->false_block_id());
1544
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001545 Representation r = instr->hydrogen()->value()->representation();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001546 if (r.IsInteger32()) {
Steve Block1e0659c2011-05-24 12:43:12 +01001547 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001548 __ cmp(reg, Operand(0));
Steve Block1e0659c2011-05-24 12:43:12 +01001549 EmitBranch(true_block, false_block, ne);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001550 } else if (r.IsDouble()) {
Steve Block1e0659c2011-05-24 12:43:12 +01001551 DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01001552 Register scratch = scratch0();
1553
Ben Murdochb8e0da22011-05-16 14:20:40 +01001554 // Test the double value. Zero and NaN are false.
1555 __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
1556 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
Ben Murdoch6d7cb002011-08-04 19:25:22 +01001557 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001558 } else {
1559 ASSERT(r.IsTagged());
Steve Block1e0659c2011-05-24 12:43:12 +01001560 Register reg = ToRegister(instr->InputAt(0));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001561 HType type = instr->hydrogen()->value()->type();
1562 if (type.IsBoolean()) {
1563 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001564 EmitBranch(true_block, false_block, eq);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001565 } else if (type.IsSmi()) {
1566 __ cmp(reg, Operand(0));
1567 EmitBranch(true_block, false_block, ne);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001568 } else {
1569 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1570 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1571
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001572 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
1573 // Avoid deopts in the case where we've never executed this path before.
1574 if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001575
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001576 if (expected.Contains(ToBooleanStub::UNDEFINED)) {
1577 // undefined -> false.
1578 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1579 __ b(eq, false_label);
1580 }
1581 if (expected.Contains(ToBooleanStub::BOOLEAN)) {
1582 // Boolean -> its value.
1583 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1584 __ b(eq, true_label);
1585 __ CompareRoot(reg, Heap::kFalseValueRootIndex);
1586 __ b(eq, false_label);
1587 }
1588 if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
1589 // 'null' -> false.
1590 __ CompareRoot(reg, Heap::kNullValueRootIndex);
1591 __ b(eq, false_label);
1592 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001593
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001594 if (expected.Contains(ToBooleanStub::SMI)) {
1595 // Smis: 0 -> false, all other -> true.
1596 __ cmp(reg, Operand(0));
1597 __ b(eq, false_label);
1598 __ JumpIfSmi(reg, true_label);
1599 } else if (expected.NeedsMap()) {
1600 // If we need a map later and have a Smi -> deopt.
1601 __ tst(reg, Operand(kSmiTagMask));
1602 DeoptimizeIf(eq, instr->environment());
1603 }
1604
1605 const Register map = scratch0();
1606 if (expected.NeedsMap()) {
1607 __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));
1608
1609 if (expected.CanBeUndetectable()) {
1610 // Undetectable -> false.
1611 __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
1612 __ tst(ip, Operand(1 << Map::kIsUndetectable));
1613 __ b(ne, false_label);
1614 }
1615 }
1616
1617 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
1618 // spec object -> true.
1619 __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
1620 __ b(ge, true_label);
1621 }
1622
1623 if (expected.Contains(ToBooleanStub::STRING)) {
1624 // String value -> false iff empty.
1625 Label not_string;
1626 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
1627 __ b(ge, &not_string);
1628 __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
1629 __ cmp(ip, Operand(0));
1630 __ b(ne, true_label);
1631 __ b(false_label);
1632 __ bind(&not_string);
1633 }
1634
1635 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
1636 // heap number -> false iff +0, -0, or NaN.
1637 DoubleRegister dbl_scratch = double_scratch0();
1638 Label not_heap_number;
1639 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
1640 __ b(ne, &not_heap_number);
1641 __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
1642 __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
1643 __ b(vs, false_label); // NaN -> false.
1644 __ b(eq, false_label); // +0, -0 -> false.
1645 __ b(true_label);
1646 __ bind(&not_heap_number);
1647 }
1648
1649 // We've seen something for the first time -> deopt.
1650 DeoptimizeIf(al, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001651 }
1652 }
1653}
1654
1655
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001656void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001657 block = chunk_->LookupDestination(block);
1658 int next_block = GetNextEmittedBlock(current_block_);
1659 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001660 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001661 }
1662}
1663
1664
Ben Murdochb0fe1622011-05-05 13:52:32 +01001665void LCodeGen::DoGoto(LGoto* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001666 EmitGoto(instr->block_id());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001667}
1668
1669
1670Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001671 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001672 switch (op) {
1673 case Token::EQ:
1674 case Token::EQ_STRICT:
1675 cond = eq;
1676 break;
1677 case Token::LT:
1678 cond = is_unsigned ? lo : lt;
1679 break;
1680 case Token::GT:
1681 cond = is_unsigned ? hi : gt;
1682 break;
1683 case Token::LTE:
1684 cond = is_unsigned ? ls : le;
1685 break;
1686 case Token::GTE:
1687 cond = is_unsigned ? hs : ge;
1688 break;
1689 case Token::IN:
1690 case Token::INSTANCEOF:
1691 default:
1692 UNREACHABLE();
1693 }
1694 return cond;
1695}
1696
1697
1698void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
Steve Block1e0659c2011-05-24 12:43:12 +01001699 __ cmp(ToRegister(left), ToRegister(right));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001700}
1701
1702
Ben Murdochb0fe1622011-05-05 13:52:32 +01001703void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001704 LOperand* left = instr->InputAt(0);
1705 LOperand* right = instr->InputAt(1);
1706 int false_block = chunk_->LookupDestination(instr->false_block_id());
1707 int true_block = chunk_->LookupDestination(instr->true_block_id());
1708
1709 if (instr->is_double()) {
1710 // Compare left and right as doubles and load the
1711 // resulting flags into the normal status register.
1712 __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
1713 // If a NaN is involved, i.e. the result is unordered (V set),
1714 // jump to false block label.
1715 __ b(vs, chunk_->GetAssemblyLabel(false_block));
1716 } else {
1717 EmitCmpI(left, right);
1718 }
1719
1720 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1721 EmitBranch(true_block, false_block, cc);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001722}
1723
1724
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001725void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001726 Register left = ToRegister(instr->InputAt(0));
1727 Register right = ToRegister(instr->InputAt(1));
1728 int false_block = chunk_->LookupDestination(instr->false_block_id());
1729 int true_block = chunk_->LookupDestination(instr->true_block_id());
1730
1731 __ cmp(left, Operand(right));
1732 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001733}
1734
1735
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001736void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001737 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00001738 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001739 int false_block = chunk_->LookupDestination(instr->false_block_id());
Ben Murdoch257744e2011-11-30 15:57:28 +00001740
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001741 __ cmp(left, Operand(instr->hydrogen()->right()));
Ben Murdoch257744e2011-11-30 15:57:28 +00001742 EmitBranch(true_block, false_block, eq);
1743}
1744
1745
Ben Murdochb0fe1622011-05-05 13:52:32 +01001746void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001747 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001748 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001749
1750 // TODO(fsc): If the expression is known to be a smi, then it's
1751 // definitely not null. Jump to the false block.
1752
1753 int true_block = chunk_->LookupDestination(instr->true_block_id());
1754 int false_block = chunk_->LookupDestination(instr->false_block_id());
1755
1756 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1757 __ cmp(reg, ip);
1758 if (instr->is_strict()) {
1759 EmitBranch(true_block, false_block, eq);
1760 } else {
1761 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1762 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1763 __ b(eq, true_label);
1764 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1765 __ cmp(reg, ip);
1766 __ b(eq, true_label);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001767 __ JumpIfSmi(reg, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001768 // Check for undetectable objects by looking in the bit field in
1769 // the map. The object has already been smi checked.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001770 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1771 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1772 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1773 EmitBranch(true_block, false_block, ne);
1774 }
1775}
1776
1777
1778Condition LCodeGen::EmitIsObject(Register input,
1779 Register temp1,
Ben Murdochb0fe1622011-05-05 13:52:32 +01001780 Label* is_not_object,
1781 Label* is_object) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001782 Register temp2 = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001783 __ JumpIfSmi(input, is_not_object);
1784
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001785 __ LoadRoot(temp2, Heap::kNullValueRootIndex);
1786 __ cmp(input, temp2);
Steve Block1e0659c2011-05-24 12:43:12 +01001787 __ b(eq, is_object);
1788
1789 // Load map.
1790 __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
1791 // Undetectable objects behave like undefined.
1792 __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
1793 __ tst(temp2, Operand(1 << Map::kIsUndetectable));
1794 __ b(ne, is_not_object);
1795
1796 // Load instance type and check that it is in object type range.
1797 __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001798 __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
Steve Block1e0659c2011-05-24 12:43:12 +01001799 __ b(lt, is_not_object);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001800 __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
Steve Block1e0659c2011-05-24 12:43:12 +01001801 return le;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001802}
1803
1804
Ben Murdochb0fe1622011-05-05 13:52:32 +01001805void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001806 Register reg = ToRegister(instr->InputAt(0));
1807 Register temp1 = ToRegister(instr->TempAt(0));
Steve Block1e0659c2011-05-24 12:43:12 +01001808
1809 int true_block = chunk_->LookupDestination(instr->true_block_id());
1810 int false_block = chunk_->LookupDestination(instr->false_block_id());
1811 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1812 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1813
1814 Condition true_cond =
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001815 EmitIsObject(reg, temp1, false_label, true_label);
Steve Block1e0659c2011-05-24 12:43:12 +01001816
1817 EmitBranch(true_block, false_block, true_cond);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001818}
1819
1820
Ben Murdochb0fe1622011-05-05 13:52:32 +01001821void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1822 int true_block = chunk_->LookupDestination(instr->true_block_id());
1823 int false_block = chunk_->LookupDestination(instr->false_block_id());
1824
Steve Block1e0659c2011-05-24 12:43:12 +01001825 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001826 __ tst(input_reg, Operand(kSmiTagMask));
1827 EmitBranch(true_block, false_block, eq);
1828}
1829
1830
Ben Murdoch257744e2011-11-30 15:57:28 +00001831void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1832 Register input = ToRegister(instr->InputAt(0));
1833 Register temp = ToRegister(instr->TempAt(0));
1834
1835 int true_block = chunk_->LookupDestination(instr->true_block_id());
1836 int false_block = chunk_->LookupDestination(instr->false_block_id());
1837
1838 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
1839 __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
1840 __ ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
1841 __ tst(temp, Operand(1 << Map::kIsUndetectable));
1842 EmitBranch(true_block, false_block, ne);
1843}
1844
1845
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001846static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001847 InstanceType from = instr->from();
1848 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001849 if (from == FIRST_TYPE) return to;
1850 ASSERT(from == to || to == LAST_TYPE);
1851 return from;
1852}
1853
1854
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001855static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001856 InstanceType from = instr->from();
1857 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001858 if (from == to) return eq;
1859 if (to == LAST_TYPE) return hs;
1860 if (from == FIRST_TYPE) return ls;
1861 UNREACHABLE();
1862 return eq;
1863}
1864
1865
Ben Murdochb0fe1622011-05-05 13:52:32 +01001866void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001867 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001868 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001869
1870 int true_block = chunk_->LookupDestination(instr->true_block_id());
1871 int false_block = chunk_->LookupDestination(instr->false_block_id());
1872
1873 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1874
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001875 __ JumpIfSmi(input, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001876
Steve Block1e0659c2011-05-24 12:43:12 +01001877 __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
1878 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001879}
1880
1881
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001882void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1883 Register input = ToRegister(instr->InputAt(0));
1884 Register result = ToRegister(instr->result());
1885
1886 if (FLAG_debug_code) {
1887 __ AbortIfNotString(input);
1888 }
1889
1890 __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
1891 __ IndexFromHash(result, result);
1892}
1893
1894
Ben Murdochb0fe1622011-05-05 13:52:32 +01001895void LCodeGen::DoHasCachedArrayIndexAndBranch(
1896 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001897 Register input = ToRegister(instr->InputAt(0));
1898 Register scratch = scratch0();
1899
1900 int true_block = chunk_->LookupDestination(instr->true_block_id());
1901 int false_block = chunk_->LookupDestination(instr->false_block_id());
1902
1903 __ ldr(scratch,
1904 FieldMemOperand(input, String::kHashFieldOffset));
1905 __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
1906 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001907}
1908
1909
Ben Murdochb8e0da22011-05-16 14:20:40 +01001910// Branches to a label or falls through with the answer in flags. Trashes
Ben Murdochb0fe1622011-05-05 13:52:32 +01001911// the temp registers, but not the input. Only input and temp2 may alias.
1912void LCodeGen::EmitClassOfTest(Label* is_true,
1913 Label* is_false,
1914 Handle<String>class_name,
1915 Register input,
1916 Register temp,
1917 Register temp2) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001918 ASSERT(!input.is(temp));
1919 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001920 __ JumpIfSmi(input, is_false);
1921 __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001922 __ b(lt, is_false);
1923
1924 // Map is now in temp.
1925 // Functions have class 'Function'.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001926 __ CompareInstanceType(temp, temp2, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001927 if (class_name->IsEqualTo(CStrVector("Function"))) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001928 __ b(ge, is_true);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001929 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001930 __ b(ge, is_false);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001931 }
1932
1933 // Check if the constructor in the map is a function.
1934 __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
1935
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001936 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type and
1937 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1938 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1939 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1940 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1941 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001942
1943 // Objects with a non-function constructor have class 'Object'.
1944 __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
1945 if (class_name->IsEqualTo(CStrVector("Object"))) {
1946 __ b(ne, is_true);
1947 } else {
1948 __ b(ne, is_false);
1949 }
1950
1951 // temp now contains the constructor function. Grab the
1952 // instance class name from there.
1953 __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1954 __ ldr(temp, FieldMemOperand(temp,
1955 SharedFunctionInfo::kInstanceClassNameOffset));
1956 // The class name we are testing against is a symbol because it's a literal.
1957 // The name in the constructor is a symbol because of the way the context is
1958 // booted. This routine isn't expected to work for random API-created
1959 // classes and it doesn't have to because you can't access it with natives
1960 // syntax. Since both sides are symbols it is sufficient to use an identity
1961 // comparison.
1962 __ cmp(temp, Operand(class_name));
1963 // End with the answer in flags.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001964}
1965
1966
Ben Murdochb0fe1622011-05-05 13:52:32 +01001967void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001968 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001969 Register temp = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001970 Register temp2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001971 Handle<String> class_name = instr->hydrogen()->class_name();
1972
1973 int true_block = chunk_->LookupDestination(instr->true_block_id());
1974 int false_block = chunk_->LookupDestination(instr->false_block_id());
1975
1976 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1977 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1978
1979 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1980
1981 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001982}
1983
1984
1985void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001986 Register reg = ToRegister(instr->InputAt(0));
1987 Register temp = ToRegister(instr->TempAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001988 int true_block = instr->true_block_id();
1989 int false_block = instr->false_block_id();
1990
1991 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
1992 __ cmp(temp, Operand(instr->map()));
1993 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001994}
1995
1996
1997void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001998 ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0.
1999 ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1.
Steve Block9fac8402011-05-12 15:51:54 +01002000
Ben Murdochb0fe1622011-05-05 13:52:32 +01002001 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
2002 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2003
Steve Block44f0eee2011-05-26 01:26:41 +01002004 __ cmp(r0, Operand(0));
2005 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
2006 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002007}
2008
2009
Ben Murdoch086aeea2011-05-13 15:57:08 +01002010void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002011 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
2012 public:
2013 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2014 LInstanceOfKnownGlobal* instr)
2015 : LDeferredCode(codegen), instr_(instr) { }
2016 virtual void Generate() {
2017 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
2018 }
2019
2020 Label* map_check() { return &map_check_; }
2021
2022 private:
2023 LInstanceOfKnownGlobal* instr_;
2024 Label map_check_;
2025 };
2026
2027 DeferredInstanceOfKnownGlobal* deferred;
2028 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
2029
2030 Label done, false_result;
2031 Register object = ToRegister(instr->InputAt(0));
2032 Register temp = ToRegister(instr->TempAt(0));
2033 Register result = ToRegister(instr->result());
2034
2035 ASSERT(object.is(r0));
2036 ASSERT(result.is(r0));
2037
2038 // A Smi is not instance of anything.
2039 __ JumpIfSmi(object, &false_result);
2040
2041 // This is the inlined call site instanceof cache. The two occurences of the
2042 // hole value will be patched to the last map/result pair generated by the
2043 // instanceof stub.
2044 Label cache_miss;
2045 Register map = temp;
2046 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2047 __ bind(deferred->map_check()); // Label for calculating code patching.
2048 // We use Factory::the_hole_value() on purpose instead of loading from the
2049 // root array to force relocation to be able to later patch with
2050 // the cached map.
Steve Block44f0eee2011-05-26 01:26:41 +01002051 __ mov(ip, Operand(factory()->the_hole_value()));
Steve Block1e0659c2011-05-24 12:43:12 +01002052 __ cmp(map, Operand(ip));
2053 __ b(ne, &cache_miss);
2054 // We use Factory::the_hole_value() on purpose instead of loading from the
2055 // root array to force relocation to be able to later patch
2056 // with true or false.
Steve Block44f0eee2011-05-26 01:26:41 +01002057 __ mov(result, Operand(factory()->the_hole_value()));
Steve Block1e0659c2011-05-24 12:43:12 +01002058 __ b(&done);
2059
2060 // The inlined call site cache did not match. Check null and string before
2061 // calling the deferred code.
2062 __ bind(&cache_miss);
2063 // Null is not instance of anything.
2064 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2065 __ cmp(object, Operand(ip));
2066 __ b(eq, &false_result);
2067
2068 // String values is not instance of anything.
2069 Condition is_string = masm_->IsObjectStringType(object, temp);
2070 __ b(is_string, &false_result);
2071
2072 // Go to the deferred code.
2073 __ b(deferred->entry());
2074
2075 __ bind(&false_result);
2076 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2077
2078 // Here result has either true or false. Deferred code also produces true or
2079 // false object.
2080 __ bind(deferred->exit());
2081 __ bind(&done);
2082}
2083
2084
2085void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2086 Label* map_check) {
2087 Register result = ToRegister(instr->result());
2088 ASSERT(result.is(r0));
2089
2090 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2091 flags = static_cast<InstanceofStub::Flags>(
2092 flags | InstanceofStub::kArgsInRegisters);
2093 flags = static_cast<InstanceofStub::Flags>(
2094 flags | InstanceofStub::kCallSiteInlineCheck);
2095 flags = static_cast<InstanceofStub::Flags>(
2096 flags | InstanceofStub::kReturnTrueFalseObject);
2097 InstanceofStub stub(flags);
2098
Ben Murdoch8b112d22011-06-08 16:22:53 +01002099 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01002100
2101 // Get the temp register reserved by the instruction. This needs to be r4 as
2102 // its slot of the pushing of safepoint registers is used to communicate the
2103 // offset to the location of the map check.
2104 Register temp = ToRegister(instr->TempAt(0));
2105 ASSERT(temp.is(r4));
2106 __ mov(InstanceofStub::right(), Operand(instr->function()));
2107 static const int kAdditionalDelta = 4;
2108 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2109 Label before_push_delta;
2110 __ bind(&before_push_delta);
2111 __ BlockConstPoolFor(kAdditionalDelta);
2112 __ mov(temp, Operand(delta * kPointerSize));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002113 __ StoreToSafepointRegisterSlot(temp, temp);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002114 CallCodeGeneric(stub.GetCode(),
2115 RelocInfo::CODE_TARGET,
2116 instr,
2117 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Steve Block1e0659c2011-05-24 12:43:12 +01002118 // Put the result value into the result register slot and
2119 // restore all registers.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002120 __ StoreToSafepointRegisterSlot(result, result);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002121}
2122
Ben Murdochb0fe1622011-05-05 13:52:32 +01002123
2124static Condition ComputeCompareCondition(Token::Value op) {
2125 switch (op) {
2126 case Token::EQ_STRICT:
2127 case Token::EQ:
2128 return eq;
2129 case Token::LT:
2130 return lt;
2131 case Token::GT:
2132 return gt;
2133 case Token::LTE:
2134 return le;
2135 case Token::GTE:
2136 return ge;
2137 default:
2138 UNREACHABLE();
Steve Block1e0659c2011-05-24 12:43:12 +01002139 return kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002140 }
2141}
2142
2143
2144void LCodeGen::DoCmpT(LCmpT* instr) {
2145 Token::Value op = instr->op();
2146
2147 Handle<Code> ic = CompareIC::GetUninitialized(op);
2148 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Steve Block1e0659c2011-05-24 12:43:12 +01002149 __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002150
2151 Condition condition = ComputeCompareCondition(op);
2152 if (op == Token::GT || op == Token::LTE) {
2153 condition = ReverseCondition(condition);
2154 }
Ben Murdochb8e0da22011-05-16 14:20:40 +01002155 __ LoadRoot(ToRegister(instr->result()),
2156 Heap::kTrueValueRootIndex,
2157 condition);
2158 __ LoadRoot(ToRegister(instr->result()),
2159 Heap::kFalseValueRootIndex,
2160 NegateCondition(condition));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002161}
2162
2163
Ben Murdochb0fe1622011-05-05 13:52:32 +01002164void LCodeGen::DoReturn(LReturn* instr) {
2165 if (FLAG_trace) {
2166 // Push the return value on the stack as the parameter.
2167 // Runtime::TraceExit returns its parameter in r0.
2168 __ push(r0);
2169 __ CallRuntime(Runtime::kTraceExit, 1);
2170 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002171 int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002172 __ mov(sp, fp);
2173 __ ldm(ia_w, sp, fp.bit() | lr.bit());
2174 __ add(sp, sp, Operand(sp_delta));
2175 __ Jump(lr);
2176}
2177
2178
Ben Murdoch8b112d22011-06-08 16:22:53 +01002179void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002180 Register result = ToRegister(instr->result());
2181 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
2182 __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
2183 if (instr->hydrogen()->check_hole_value()) {
2184 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2185 __ cmp(result, ip);
2186 DeoptimizeIf(eq, instr->environment());
2187 }
2188}
2189
2190
Ben Murdoch8b112d22011-06-08 16:22:53 +01002191void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2192 ASSERT(ToRegister(instr->global_object()).is(r0));
2193 ASSERT(ToRegister(instr->result()).is(r0));
2194
2195 __ mov(r2, Operand(instr->name()));
2196 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2197 : RelocInfo::CODE_TARGET_CONTEXT;
2198 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2199 CallCode(ic, mode, instr);
2200}
2201
2202
2203void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002204 Register value = ToRegister(instr->InputAt(0));
2205 Register scratch = scratch0();
2206
2207 // Load the cell.
2208 __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));
2209
2210 // If the cell we are storing to contains the hole it could have
2211 // been deleted from the property dictionary. In that case, we need
2212 // to update the property details in the property dictionary to mark
2213 // it as no longer deleted.
2214 if (instr->hydrogen()->check_hole_value()) {
2215 Register scratch2 = ToRegister(instr->TempAt(0));
2216 __ ldr(scratch2,
2217 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
2218 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2219 __ cmp(scratch2, ip);
2220 DeoptimizeIf(eq, instr->environment());
2221 }
2222
2223 // Store the value.
2224 __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002225}
2226
2227
Ben Murdoch8b112d22011-06-08 16:22:53 +01002228void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2229 ASSERT(ToRegister(instr->global_object()).is(r1));
2230 ASSERT(ToRegister(instr->value()).is(r0));
2231
2232 __ mov(r2, Operand(instr->name()));
2233 Handle<Code> ic = instr->strict_mode()
2234 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2235 : isolate()->builtins()->StoreIC_Initialize();
2236 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2237}
2238
2239
Ben Murdochb8e0da22011-05-16 14:20:40 +01002240void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002241 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002242 Register result = ToRegister(instr->result());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002243 __ ldr(result, ContextOperand(context, instr->slot_index()));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002244}
2245
2246
Steve Block1e0659c2011-05-24 12:43:12 +01002247void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2248 Register context = ToRegister(instr->context());
2249 Register value = ToRegister(instr->value());
Steve Block1e0659c2011-05-24 12:43:12 +01002250 __ str(value, ContextOperand(context, instr->slot_index()));
2251 if (instr->needs_write_barrier()) {
2252 int offset = Context::SlotOffset(instr->slot_index());
2253 __ RecordWrite(context, Operand(offset), value, scratch0());
2254 }
2255}
2256
2257
Ben Murdochb0fe1622011-05-05 13:52:32 +01002258void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002259 Register object = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01002260 Register result = ToRegister(instr->result());
2261 if (instr->hydrogen()->is_in_object()) {
2262 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
2263 } else {
2264 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2265 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
2266 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002267}
2268
2269
Ben Murdoch257744e2011-11-30 15:57:28 +00002270void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2271 Register object,
2272 Handle<Map> type,
2273 Handle<String> name) {
Steve Block44f0eee2011-05-26 01:26:41 +01002274 LookupResult lookup;
2275 type->LookupInDescriptors(NULL, *name, &lookup);
Ben Murdoch257744e2011-11-30 15:57:28 +00002276 ASSERT(lookup.IsProperty() &&
2277 (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
2278 if (lookup.type() == FIELD) {
2279 int index = lookup.GetLocalFieldIndexFromMap(*type);
2280 int offset = index * kPointerSize;
2281 if (index < 0) {
2282 // Negative property indices are in-object properties, indexed
2283 // from the end of the fixed part of the object.
2284 __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
2285 } else {
2286 // Non-negative property indices are in the properties array.
2287 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2288 __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
2289 }
Steve Block44f0eee2011-05-26 01:26:41 +01002290 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002291 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
2292 LoadHeapObject(result, Handle<HeapObject>::cast(function));
Steve Block44f0eee2011-05-26 01:26:41 +01002293 }
2294}
2295
2296
// Polymorphic named load: compares the receiver's map against each map the
// hydrogen instruction collected and emits a monomorphic load per map.
// Depending on need_generic(), the fall-through case either calls the
// generic LoadIC or deoptimizes.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps recorded: the generic IC is the only option.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    // Load the receiver's map once; each case below compares against it.
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    // The last map is handled specially so the miss path can be either a
    // generic IC call or a deoptimization.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      // Unknown map and no generic fallback: bail out to the unoptimized code.
      DeoptimizeIf(ne, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
2338
2339
Ben Murdochb0fe1622011-05-05 13:52:32 +01002340void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2341 ASSERT(ToRegister(instr->object()).is(r0));
2342 ASSERT(ToRegister(instr->result()).is(r0));
2343
2344 // Name is always in r2.
2345 __ mov(r2, Operand(instr->name()));
Steve Block44f0eee2011-05-26 01:26:41 +01002346 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002347 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2348}
2349
2350
Steve Block9fac8402011-05-12 15:51:54 +01002351void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2352 Register scratch = scratch0();
2353 Register function = ToRegister(instr->function());
2354 Register result = ToRegister(instr->result());
2355
2356 // Check that the function really is a function. Load map into the
2357 // result register.
2358 __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
2359 DeoptimizeIf(ne, instr->environment());
2360
2361 // Make sure that the function has an instance prototype.
2362 Label non_instance;
2363 __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
2364 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
2365 __ b(ne, &non_instance);
2366
2367 // Get the prototype or initial map from the function.
2368 __ ldr(result,
2369 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2370
2371 // Check that the function has a prototype or an initial map.
2372 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2373 __ cmp(result, ip);
2374 DeoptimizeIf(eq, instr->environment());
2375
2376 // If the function does not have an initial map, we're done.
2377 Label done;
2378 __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
2379 __ b(ne, &done);
2380
2381 // Get the prototype from the initial map.
2382 __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
2383 __ jmp(&done);
2384
2385 // Non-instance prototype: Fetch prototype from constructor field
2386 // in initial map.
2387 __ bind(&non_instance);
2388 __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
2389
2390 // All done.
2391 __ bind(&done);
2392}
2393
2394
Ben Murdochb0fe1622011-05-05 13:52:32 +01002395void LCodeGen::DoLoadElements(LLoadElements* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002396 Register result = ToRegister(instr->result());
2397 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002398 Register scratch = scratch0();
2399
Steve Block1e0659c2011-05-24 12:43:12 +01002400 __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002401 if (FLAG_debug_code) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002402 Label done, fail;
Steve Block1e0659c2011-05-24 12:43:12 +01002403 __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002404 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2405 __ cmp(scratch, ip);
2406 __ b(eq, &done);
2407 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
2408 __ cmp(scratch, ip);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002409 __ b(eq, &done);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002410 // |scratch| still contains |input|'s map.
2411 __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
2412 __ ubfx(scratch, scratch, Map::kElementsKindShift,
2413 Map::kElementsKindBitCount);
Ben Murdoch589d6972011-11-30 16:04:58 +00002414 __ cmp(scratch, Operand(FAST_ELEMENTS));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002415 __ b(eq, &done);
Ben Murdoch589d6972011-11-30 16:04:58 +00002416 __ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002417 __ b(lt, &fail);
Ben Murdoch589d6972011-11-30 16:04:58 +00002418 __ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002419 __ b(le, &done);
2420 __ bind(&fail);
2421 __ Abort("Check for fast or external elements failed.");
Ben Murdoch086aeea2011-05-13 15:57:08 +01002422 __ bind(&done);
2423 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002424}
2425
2426
Steve Block44f0eee2011-05-26 01:26:41 +01002427void LCodeGen::DoLoadExternalArrayPointer(
2428 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002429 Register to_reg = ToRegister(instr->result());
2430 Register from_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002431 __ ldr(to_reg, FieldMemOperand(from_reg,
2432 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002433}
2434
2435
Ben Murdochb0fe1622011-05-05 13:52:32 +01002436void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002437 Register arguments = ToRegister(instr->arguments());
2438 Register length = ToRegister(instr->length());
2439 Register index = ToRegister(instr->index());
2440 Register result = ToRegister(instr->result());
2441
2442 // Bailout index is not a valid argument index. Use unsigned check to get
2443 // negative check for free.
2444 __ sub(length, length, index, SetCC);
2445 DeoptimizeIf(ls, instr->environment());
2446
2447 // There are two words between the frame pointer and the last argument.
2448 // Subtracting from length accounts for one of them add one more.
2449 __ add(length, length, Operand(1));
2450 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002451}
2452
2453
// Loads an element from a fast-elements FixedArray at a dynamic key,
// optionally deoptimizing when the loaded value is the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result: elements + key * kPointerSize, plus the header.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value (marks an absent element in fast arrays).
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ cmp(result, scratch);
    DeoptimizeIf(eq, instr->environment());
  }
}
2471
2472
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002473void LCodeGen::DoLoadKeyedFastDoubleElement(
2474 LLoadKeyedFastDoubleElement* instr) {
2475 Register elements = ToRegister(instr->elements());
2476 bool key_is_constant = instr->key()->IsConstantOperand();
2477 Register key = no_reg;
2478 DwVfpRegister result = ToDoubleRegister(instr->result());
2479 Register scratch = scratch0();
2480
2481 int shift_size =
Ben Murdoch589d6972011-11-30 16:04:58 +00002482 ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002483 int constant_key = 0;
2484 if (key_is_constant) {
2485 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
2486 if (constant_key & 0xF0000000) {
2487 Abort("array index constant value too big.");
2488 }
2489 } else {
2490 key = ToRegister(instr->key());
2491 }
2492
2493 Operand operand = key_is_constant
2494 ? Operand(constant_key * (1 << shift_size) +
2495 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
2496 : Operand(key, LSL, shift_size);
2497 __ add(elements, elements, operand);
2498 if (!key_is_constant) {
2499 __ add(elements, elements,
2500 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
2501 }
2502
2503 if (instr->hydrogen()->RequiresHoleCheck()) {
2504 // TODO(danno): If no hole check is required, there is no need to allocate
2505 // elements into a temporary register, instead scratch can be used.
2506 __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
2507 __ cmp(scratch, Operand(kHoleNanUpper32));
2508 DeoptimizeIf(eq, instr->environment());
2509 }
2510
2511 __ vldr(result, elements, 0);
2512}
2513
2514
Steve Block44f0eee2011-05-26 01:26:41 +01002515void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2516 LLoadKeyedSpecializedArrayElement* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01002517 Register external_pointer = ToRegister(instr->external_pointer());
Ben Murdoch257744e2011-11-30 15:57:28 +00002518 Register key = no_reg;
Ben Murdoch589d6972011-11-30 16:04:58 +00002519 ElementsKind elements_kind = instr->elements_kind();
Ben Murdoch257744e2011-11-30 15:57:28 +00002520 bool key_is_constant = instr->key()->IsConstantOperand();
2521 int constant_key = 0;
2522 if (key_is_constant) {
2523 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
2524 if (constant_key & 0xF0000000) {
2525 Abort("array index constant value too big.");
2526 }
2527 } else {
2528 key = ToRegister(instr->key());
2529 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002530 int shift_size = ElementsKindToShiftSize(elements_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00002531
Ben Murdoch589d6972011-11-30 16:04:58 +00002532 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
2533 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002534 CpuFeatures::Scope scope(VFP3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002535 DwVfpRegister result = ToDoubleRegister(instr->result());
2536 Operand operand = key_is_constant
2537 ? Operand(constant_key * (1 << shift_size))
2538 : Operand(key, LSL, shift_size);
Ben Murdoch257744e2011-11-30 15:57:28 +00002539 __ add(scratch0(), external_pointer, operand);
Ben Murdoch589d6972011-11-30 16:04:58 +00002540 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002541 __ vldr(result.low(), scratch0(), 0);
2542 __ vcvt_f64_f32(result, result.low());
Ben Murdoch589d6972011-11-30 16:04:58 +00002543 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
Ben Murdoch257744e2011-11-30 15:57:28 +00002544 __ vldr(result, scratch0(), 0);
2545 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01002546 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002547 Register result = ToRegister(instr->result());
Ben Murdoch257744e2011-11-30 15:57:28 +00002548 MemOperand mem_operand(key_is_constant
2549 ? MemOperand(external_pointer, constant_key * (1 << shift_size))
2550 : MemOperand(external_pointer, key, LSL, shift_size));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002551 switch (elements_kind) {
Ben Murdoch589d6972011-11-30 16:04:58 +00002552 case EXTERNAL_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002553 __ ldrsb(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002554 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002555 case EXTERNAL_PIXEL_ELEMENTS:
2556 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002557 __ ldrb(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002558 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002559 case EXTERNAL_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002560 __ ldrsh(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002561 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002562 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002563 __ ldrh(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002564 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002565 case EXTERNAL_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002566 __ ldr(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002567 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002568 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002569 __ ldr(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002570 __ cmp(result, Operand(0x80000000));
2571 // TODO(danno): we could be more clever here, perhaps having a special
2572 // version of the stub that detects if the overflow case actually
2573 // happens, and generate code that returns a double rather than int.
2574 DeoptimizeIf(cs, instr->environment());
2575 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002576 case EXTERNAL_FLOAT_ELEMENTS:
2577 case EXTERNAL_DOUBLE_ELEMENTS:
2578 case FAST_DOUBLE_ELEMENTS:
2579 case FAST_ELEMENTS:
2580 case DICTIONARY_ELEMENTS:
2581 case NON_STRICT_ARGUMENTS_ELEMENTS:
Ben Murdoch8b112d22011-06-08 16:22:53 +01002582 UNREACHABLE();
2583 break;
2584 }
2585 }
Steve Block1e0659c2011-05-24 12:43:12 +01002586}
2587
2588
Ben Murdochb0fe1622011-05-05 13:52:32 +01002589void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2590 ASSERT(ToRegister(instr->object()).is(r1));
2591 ASSERT(ToRegister(instr->key()).is(r0));
2592
Steve Block44f0eee2011-05-26 01:26:41 +01002593 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002594 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2595}
2596
2597
2598void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002599 Register scratch = scratch0();
2600 Register result = ToRegister(instr->result());
2601
2602 // Check if the calling frame is an arguments adaptor frame.
2603 Label done, adapted;
2604 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2605 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
2606 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2607
2608 // Result is the frame pointer for the frame if not adapted and for the real
2609 // frame below the adaptor frame if adapted.
2610 __ mov(result, fp, LeaveCC, ne);
2611 __ mov(result, scratch, LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002612}
2613
2614
// Computes the number of actual arguments. If the input frame pointer is
// fp itself there is no adaptor frame and the count is the static parameter
// count; otherwise it is read from the arguments adaptor frame and untagged.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);  // Length is stored as a smi.

  // Argument length is in result register.
  __ bind(&done);
}
2635
2636
// Implements Function.prototype.apply-style invocation: normalizes the
// receiver, pushes up to kArgumentsLimit arguments copied from the
// arguments area, and invokes |function|. Deoptimizes on a bad receiver
// or on too many arguments.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions (flag read from the SharedFunctionInfo compiler hints).
  __ ldr(scratch,
         FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch,
         FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
  __ tst(scratch,
         Operand(1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Do not transform the receiver to object for builtins.
  __ tst(scratch, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Normal function. Replace undefined or null with global receiver.
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object (smi or below the
  // first spec-object instance type).
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
  DeoptimizeIf(lt, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ ldr(receiver,
         FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack, from the highest index down to 1.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2728
2729
2730void LCodeGen::DoPushArgument(LPushArgument* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002731 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002732 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2733 Abort("DoPushArgument not implemented for double type.");
2734 } else {
2735 Register argument_reg = EmitLoadRegister(argument, ip);
2736 __ push(argument_reg);
2737 }
2738}
2739
2740
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002741void LCodeGen::DoThisFunction(LThisFunction* instr) {
2742 Register result = ToRegister(instr->result());
2743 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2744}
2745
2746
Steve Block1e0659c2011-05-24 12:43:12 +01002747void LCodeGen::DoContext(LContext* instr) {
2748 Register result = ToRegister(instr->result());
2749 __ mov(result, cp);
2750}
2751
2752
2753void LCodeGen::DoOuterContext(LOuterContext* instr) {
2754 Register context = ToRegister(instr->context());
2755 Register result = ToRegister(instr->result());
2756 __ ldr(result,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002757 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block1e0659c2011-05-24 12:43:12 +01002758}
2759
2760
Ben Murdochb0fe1622011-05-05 13:52:32 +01002761void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2762 Register result = ToRegister(instr->result());
2763 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2764}
2765
2766
2767void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002768 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002769 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002770 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002771}
2772
2773
// Emits a direct call to a statically-known JSFunction. Callers must have
// placed the function object in r1 (its context and code entry are loaded
// from there). Sets up lazy deoptimization and restores cp afterwards.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to arguments count if adaption is not needed. Assumes that r0
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function: tag the call kind in r5 and jump through the
  // function's code entry.
  __ SetCallKind(r5, call_kind);
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2807
2808
2809void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002810 ASSERT(ToRegister(instr->result()).is(r0));
2811 __ mov(r1, Operand(instr->function()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002812 CallKnownFunction(instr->function(),
2813 instr->arity(),
2814 instr,
2815 CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002816}
2817
2818
// Deferred (slow) path of Math.abs for a tagged input: deoptimizes unless
// the input is a heap number; for a negative heap number, allocates a new
// heap number with the sign bit cleared. Positive inputs are returned
// unchanged.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  Register exponent = scratch0();
  scratch = no_reg;  // scratch0() is reused as |exponent| from here on.
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  // Move the input to the result if necessary.
  __ Move(result, input);
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    // Fast path: try to allocate the result heap number inline.
    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    // Clear the sign bit in the exponent word and copy both words over.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    __ StoreToSafepointRegisterSlot(tmp1, result);
  }

  __ bind(&done);
}
2883
2884
// Emits integer absolute value: copies non-negative input, negates negative
// input, and deoptimizes if the negation overflows (sets V).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ cmp(input, Operand(0));
  // Non-negative (pl): result is the input unchanged.
  __ Move(result, input, pl);
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(result, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
2897
2898
// Math.abs dispatch by representation: vabs for doubles, inline integer abs
// for int32, and for tagged values a smi fast path with a deferred slow path
// for heap numbers / non-numbers.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    DwVfpRegister result = ToDoubleRegister(instr->result());
    __ vabs(result, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2932
2933
Ben Murdochb0fe1622011-05-05 13:52:32 +01002934void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002935 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002936 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002937 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01002938 Register scratch1 = scratch0();
2939 Register scratch2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002940
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002941 __ EmitVFPTruncate(kRoundToMinusInf,
2942 single_scratch,
2943 input,
2944 scratch1,
2945 scratch2);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002946 DeoptimizeIf(ne, instr->environment());
2947
2948 // Move the result back to general purpose register r0.
2949 __ vmov(result, single_scratch);
2950
Steve Block44f0eee2011-05-26 01:26:41 +01002951 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2952 // Test for -0.
2953 Label done;
2954 __ cmp(result, Operand(0));
2955 __ b(ne, &done);
2956 __ vmov(scratch1, input.high());
2957 __ tst(scratch1, Operand(HeapNumber::kSignMask));
2958 DeoptimizeIf(ne, instr->environment());
2959 __ bind(&done);
2960 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002961}
2962
2963
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002964void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2965 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
2966 Register result = ToRegister(instr->result());
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002967 Register scratch = scratch0();
Ben Murdoch257744e2011-11-30 15:57:28 +00002968 Label done, check_sign_on_zero;
2969
2970 // Extract exponent bits.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002971 __ vmov(result, input.high());
2972 __ ubfx(scratch,
2973 result,
Ben Murdoch257744e2011-11-30 15:57:28 +00002974 HeapNumber::kExponentShift,
2975 HeapNumber::kExponentBits);
2976
2977 // If the number is in ]-0.5, +0.5[, the result is +/- 0.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002978 __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2));
Ben Murdoch257744e2011-11-30 15:57:28 +00002979 __ mov(result, Operand(0), LeaveCC, le);
2980 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2981 __ b(le, &check_sign_on_zero);
2982 } else {
2983 __ b(le, &done);
2984 }
2985
2986 // The following conversion will not work with numbers
2987 // outside of ]-2^32, 2^32[.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002988 __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
Ben Murdoch257744e2011-11-30 15:57:28 +00002989 DeoptimizeIf(ge, instr->environment());
2990
2991 // Save the original sign for later comparison.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002992 __ and_(scratch, result, Operand(HeapNumber::kSignMask));
Ben Murdoch257744e2011-11-30 15:57:28 +00002993
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002994 __ Vmov(double_scratch0(), 0.5);
Ben Murdoch257744e2011-11-30 15:57:28 +00002995 __ vadd(input, input, double_scratch0());
2996
2997 // Check sign of the result: if the sign changed, the input
2998 // value was in ]0.5, 0[ and the result should be -0.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002999 __ vmov(result, input.high());
3000 __ eor(result, result, Operand(scratch), SetCC);
Ben Murdoch257744e2011-11-30 15:57:28 +00003001 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3002 DeoptimizeIf(mi, instr->environment());
3003 } else {
3004 __ mov(result, Operand(0), LeaveCC, mi);
3005 __ b(mi, &done);
3006 }
3007
3008 __ EmitVFPTruncate(kRoundToMinusInf,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003009 double_scratch0().low(),
3010 input,
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003011 result,
3012 scratch);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003013 DeoptimizeIf(ne, instr->environment());
3014 __ vmov(result, double_scratch0().low());
3015
Steve Block44f0eee2011-05-26 01:26:41 +01003016 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3017 // Test for -0.
Steve Block44f0eee2011-05-26 01:26:41 +01003018 __ cmp(result, Operand(0));
3019 __ b(ne, &done);
Ben Murdoch257744e2011-11-30 15:57:28 +00003020 __ bind(&check_sign_on_zero);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003021 __ vmov(scratch, input.high());
3022 __ tst(scratch, Operand(HeapNumber::kSignMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003023 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003024 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003025 __ bind(&done);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003026}
3027
3028
Ben Murdochb0fe1622011-05-05 13:52:32 +01003029void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003030 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003031 DoubleRegister result = ToDoubleRegister(instr->result());
3032 __ vsqrt(result, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003033}
3034
3035
Steve Block44f0eee2011-05-26 01:26:41 +01003036void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
3037 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003038 DoubleRegister result = ToDoubleRegister(instr->result());
Steve Block44f0eee2011-05-26 01:26:41 +01003039 // Add +0 to convert -0 to +0.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003040 __ vadd(result, input, kDoubleRegZero);
3041 __ vsqrt(result, result);
Steve Block44f0eee2011-05-26 01:26:41 +01003042}
3043
3044
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003045void LCodeGen::DoPower(LPower* instr) {
3046 LOperand* left = instr->InputAt(0);
3047 LOperand* right = instr->InputAt(1);
3048 Register scratch = scratch0();
3049 DoubleRegister result_reg = ToDoubleRegister(instr->result());
3050 Representation exponent_type = instr->hydrogen()->right()->representation();
3051 if (exponent_type.IsDouble()) {
3052 // Prepare arguments and call C function.
Ben Murdoch257744e2011-11-30 15:57:28 +00003053 __ PrepareCallCFunction(0, 2, scratch);
3054 __ SetCallCDoubleArguments(ToDoubleRegister(left),
3055 ToDoubleRegister(right));
Steve Block44f0eee2011-05-26 01:26:41 +01003056 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00003057 ExternalReference::power_double_double_function(isolate()), 0, 2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003058 } else if (exponent_type.IsInteger32()) {
3059 ASSERT(ToRegister(right).is(r0));
3060 // Prepare arguments and call C function.
Ben Murdoch257744e2011-11-30 15:57:28 +00003061 __ PrepareCallCFunction(1, 1, scratch);
3062 __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
Steve Block44f0eee2011-05-26 01:26:41 +01003063 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00003064 ExternalReference::power_double_int_function(isolate()), 1, 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003065 } else {
3066 ASSERT(exponent_type.IsTagged());
3067 ASSERT(instr->hydrogen()->left()->representation().IsDouble());
3068
3069 Register right_reg = ToRegister(right);
3070
3071 // Check for smi on the right hand side.
3072 Label non_smi, call;
3073 __ JumpIfNotSmi(right_reg, &non_smi);
3074
3075 // Untag smi and convert it to a double.
3076 __ SmiUntag(right_reg);
3077 SwVfpRegister single_scratch = double_scratch0().low();
3078 __ vmov(single_scratch, right_reg);
3079 __ vcvt_f64_s32(result_reg, single_scratch);
3080 __ jmp(&call);
3081
3082 // Heap number map check.
3083 __ bind(&non_smi);
3084 __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
3085 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3086 __ cmp(scratch, Operand(ip));
3087 DeoptimizeIf(ne, instr->environment());
3088 int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
3089 __ add(scratch, right_reg, Operand(value_offset));
3090 __ vldr(result_reg, scratch, 0);
3091
3092 // Prepare arguments and call C function.
3093 __ bind(&call);
Ben Murdoch257744e2011-11-30 15:57:28 +00003094 __ PrepareCallCFunction(0, 2, scratch);
3095 __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01003096 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00003097 ExternalReference::power_double_double_function(isolate()), 0, 2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003098 }
3099 // Store the result in the result register.
3100 __ GetCFunctionDoubleResult(result_reg);
3101}
3102
3103
3104void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
3105 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3106 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3107 TranscendentalCacheStub::UNTAGGED);
3108 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3109}
3110
3111
3112void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
3113 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3114 TranscendentalCacheStub stub(TranscendentalCache::COS,
3115 TranscendentalCacheStub::UNTAGGED);
3116 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3117}
3118
3119
3120void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
3121 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3122 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3123 TranscendentalCacheStub::UNTAGGED);
3124 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3125}
3126
3127
Ben Murdochb0fe1622011-05-05 13:52:32 +01003128void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3129 switch (instr->op()) {
3130 case kMathAbs:
3131 DoMathAbs(instr);
3132 break;
3133 case kMathFloor:
3134 DoMathFloor(instr);
3135 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003136 case kMathRound:
3137 DoMathRound(instr);
3138 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003139 case kMathSqrt:
3140 DoMathSqrt(instr);
3141 break;
Steve Block44f0eee2011-05-26 01:26:41 +01003142 case kMathPowHalf:
3143 DoMathPowHalf(instr);
3144 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003145 case kMathCos:
3146 DoMathCos(instr);
3147 break;
3148 case kMathSin:
3149 DoMathSin(instr);
3150 break;
3151 case kMathLog:
3152 DoMathLog(instr);
3153 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003154 default:
3155 Abort("Unimplemented type of LUnaryMathOperation.");
3156 UNREACHABLE();
3157 }
3158}
3159
3160
Ben Murdoch257744e2011-11-30 15:57:28 +00003161void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3162 ASSERT(ToRegister(instr->function()).is(r1));
3163 ASSERT(instr->HasPointerMap());
3164 ASSERT(instr->HasDeoptimizationEnvironment());
3165 LPointerMap* pointers = instr->pointer_map();
3166 LEnvironment* env = instr->deoptimization_environment();
3167 RecordPosition(pointers->position());
3168 RegisterEnvironmentForDeoptimization(env);
3169 SafepointGenerator generator(this, pointers, env->deoptimization_index());
3170 ParameterCount count(instr->arity());
3171 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
3172 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3173}
3174
3175
Ben Murdochb0fe1622011-05-05 13:52:32 +01003176void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003177 ASSERT(ToRegister(instr->result()).is(r0));
3178
3179 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003180 Handle<Code> ic =
Ben Murdoch589d6972011-11-30 16:04:58 +00003181 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003182 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3183 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003184}
3185
3186
// Emits a named call (obj.name(...)) through the call IC stub.  The
// property name is passed in r2 per the call IC calling convention;
// the result arrives in r0.
void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, mode, instr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3199
3200
// Emits a call to a function value via CallFunctionStub, drops the
// function slot left on the stack, and restores the context register.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  CallFunctionStub stub(arity, RECEIVER_MIGHT_BE_IMPLICIT);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  // Pop the function object pushed by earlier code.
  __ Drop(1);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3210
3211
// Emits a call to a global function through the call IC using the
// CODE_TARGET_CONTEXT reloc mode (lookup is performed in the global
// context).  Name goes in r2, result arrives in r0.
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, mode, instr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3223
3224
// Emits a direct call to a statically known global function; the
// target function object is materialized in r1 as expected by
// CallKnownFunction.  Result arrives in r0.
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->target()));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
3230
3231
// Emits a constructor call (new F(...)) through the JSConstructCall
// builtin: constructor in r1, argument count in r0, result in r0.
void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->result()).is(r0));

  Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
  __ mov(r0, Operand(instr->arity()));
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
}
3240
3241
// Emits a call into the V8 runtime for the instruction's runtime
// function and arity.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3245
3246
// Emits a monomorphic named-field store: optionally installs a map
// transition, then writes the value either into the object itself or
// into its out-of-line properties array, emitting a write barrier when
// the hydrogen instruction says one is needed.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  // Install the transition map first so the object is in its new shape
  // before the field write.
  if (!instr->transition().is_null()) {
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}
3277
3278
// Emits a generic named store through the StoreIC stub, selecting the
// strict-mode variant when the instruction requires it.  Fixed calling
// convention: receiver in r1, value in r0, name in r2.
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
3290
3291
// Emits an array bounds check: deoptimizes when index >= length.
// The unsigned (hs) comparison also rejects negative indices.
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
  DeoptimizeIf(hs, instr->environment());
}
3296
3297
// Emits a store into a fast-elements FixedArray backing store, either
// at a constant offset or at a register-computed index, followed by a
// write barrier when required.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // Constant-key stores never need a barrier here (asserted), so the
    // scratch address computation below can be skipped.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
3322
3323
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003324void LCodeGen::DoStoreKeyedFastDoubleElement(
3325 LStoreKeyedFastDoubleElement* instr) {
3326 DwVfpRegister value = ToDoubleRegister(instr->value());
3327 Register elements = ToRegister(instr->elements());
3328 Register key = no_reg;
3329 Register scratch = scratch0();
3330 bool key_is_constant = instr->key()->IsConstantOperand();
3331 int constant_key = 0;
3332 Label not_nan;
3333
3334 // Calculate the effective address of the slot in the array to store the
3335 // double value.
3336 if (key_is_constant) {
3337 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3338 if (constant_key & 0xF0000000) {
3339 Abort("array index constant value too big.");
3340 }
3341 } else {
3342 key = ToRegister(instr->key());
3343 }
Ben Murdoch589d6972011-11-30 16:04:58 +00003344 int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003345 Operand operand = key_is_constant
3346 ? Operand(constant_key * (1 << shift_size) +
3347 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
3348 : Operand(key, LSL, shift_size);
3349 __ add(scratch, elements, operand);
3350 if (!key_is_constant) {
3351 __ add(scratch, scratch,
3352 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
3353 }
3354
3355 // Check for NaN. All NaNs must be canonicalized.
3356 __ VFPCompareAndSetFlags(value, value);
3357
3358 // Only load canonical NaN if the comparison above set the overflow.
3359 __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);
3360
3361 __ bind(&not_nan);
3362 __ vstr(value, scratch, 0);
3363}
3364
3365
Steve Block44f0eee2011-05-26 01:26:41 +01003366void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3367 LStoreKeyedSpecializedArrayElement* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003368
3369 Register external_pointer = ToRegister(instr->external_pointer());
Ben Murdoch257744e2011-11-30 15:57:28 +00003370 Register key = no_reg;
Ben Murdoch589d6972011-11-30 16:04:58 +00003371 ElementsKind elements_kind = instr->elements_kind();
Ben Murdoch257744e2011-11-30 15:57:28 +00003372 bool key_is_constant = instr->key()->IsConstantOperand();
3373 int constant_key = 0;
3374 if (key_is_constant) {
3375 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3376 if (constant_key & 0xF0000000) {
3377 Abort("array index constant value too big.");
3378 }
3379 } else {
3380 key = ToRegister(instr->key());
3381 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003382 int shift_size = ElementsKindToShiftSize(elements_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00003383
Ben Murdoch589d6972011-11-30 16:04:58 +00003384 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
3385 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01003386 CpuFeatures::Scope scope(VFP3);
3387 DwVfpRegister value(ToDoubleRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003388 Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
3389 : Operand(key, LSL, shift_size));
3390 __ add(scratch0(), external_pointer, operand);
Ben Murdoch589d6972011-11-30 16:04:58 +00003391 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003392 __ vcvt_f32_f64(double_scratch0().low(), value);
3393 __ vstr(double_scratch0().low(), scratch0(), 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00003394 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
Ben Murdoch257744e2011-11-30 15:57:28 +00003395 __ vstr(value, scratch0(), 0);
3396 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003397 } else {
3398 Register value(ToRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003399 MemOperand mem_operand(key_is_constant
3400 ? MemOperand(external_pointer, constant_key * (1 << shift_size))
3401 : MemOperand(external_pointer, key, LSL, shift_size));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003402 switch (elements_kind) {
Ben Murdoch589d6972011-11-30 16:04:58 +00003403 case EXTERNAL_PIXEL_ELEMENTS:
3404 case EXTERNAL_BYTE_ELEMENTS:
3405 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003406 __ strb(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003407 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003408 case EXTERNAL_SHORT_ELEMENTS:
3409 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003410 __ strh(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003411 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003412 case EXTERNAL_INT_ELEMENTS:
3413 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003414 __ str(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003415 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003416 case EXTERNAL_FLOAT_ELEMENTS:
3417 case EXTERNAL_DOUBLE_ELEMENTS:
3418 case FAST_DOUBLE_ELEMENTS:
3419 case FAST_ELEMENTS:
3420 case DICTIONARY_ELEMENTS:
3421 case NON_STRICT_ARGUMENTS_ELEMENTS:
Ben Murdoch8b112d22011-06-08 16:22:53 +01003422 UNREACHABLE();
3423 break;
3424 }
3425 }
Steve Block44f0eee2011-05-26 01:26:41 +01003426}
3427
3428
Ben Murdochb0fe1622011-05-05 13:52:32 +01003429void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3430 ASSERT(ToRegister(instr->object()).is(r2));
3431 ASSERT(ToRegister(instr->key()).is(r1));
3432 ASSERT(ToRegister(instr->value()).is(r0));
3433
Ben Murdoch8b112d22011-06-08 16:22:53 +01003434 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003435 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3436 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003437 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3438}
3439
3440
Ben Murdoch257744e2011-11-30 15:57:28 +00003441void LCodeGen::DoStringAdd(LStringAdd* instr) {
3442 __ push(ToRegister(instr->left()));
3443 __ push(ToRegister(instr->right()));
3444 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3445 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3446}
3447
3448
Steve Block1e0659c2011-05-24 12:43:12 +01003449void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3450 class DeferredStringCharCodeAt: public LDeferredCode {
3451 public:
3452 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3453 : LDeferredCode(codegen), instr_(instr) { }
3454 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3455 private:
3456 LStringCharCodeAt* instr_;
3457 };
3458
Steve Block1e0659c2011-05-24 12:43:12 +01003459 Register string = ToRegister(instr->string());
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003460 Register index = ToRegister(instr->index());
Steve Block1e0659c2011-05-24 12:43:12 +01003461 Register result = ToRegister(instr->result());
3462
3463 DeferredStringCharCodeAt* deferred =
3464 new DeferredStringCharCodeAt(this, instr);
3465
Steve Block1e0659c2011-05-24 12:43:12 +01003466 // Fetch the instance type of the receiver into result register.
3467 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3468 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3469
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003470 // We need special handling for indirect strings.
3471 Label check_sequential;
3472 __ tst(result, Operand(kIsIndirectStringMask));
3473 __ b(eq, &check_sequential);
Steve Block1e0659c2011-05-24 12:43:12 +01003474
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003475 // Dispatch on the indirect string shape: slice or cons.
3476 Label cons_string;
3477 __ tst(result, Operand(kSlicedNotConsMask));
3478 __ b(eq, &cons_string);
Steve Block1e0659c2011-05-24 12:43:12 +01003479
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003480 // Handle slices.
3481 Label indirect_string_loaded;
3482 __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
3483 __ add(index, index, Operand(result, ASR, kSmiTagSize));
3484 __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
3485 __ jmp(&indirect_string_loaded);
3486
3487 // Handle conses.
Steve Block1e0659c2011-05-24 12:43:12 +01003488 // Check whether the right hand side is the empty string (i.e. if
3489 // this is really a flat string in a cons string). If that is not
3490 // the case we would rather go to the runtime system now to flatten
3491 // the string.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003492 __ bind(&cons_string);
3493 __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01003494 __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003495 __ cmp(result, ip);
Steve Block1e0659c2011-05-24 12:43:12 +01003496 __ b(ne, deferred->entry());
3497 // Get the first of the two strings and load its instance type.
3498 __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003499
3500 __ bind(&indirect_string_loaded);
Steve Block1e0659c2011-05-24 12:43:12 +01003501 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3502 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003503
3504 // Check whether the string is sequential. The only non-sequential
3505 // shapes we support have just been unwrapped above.
3506 __ bind(&check_sequential);
Steve Block1e0659c2011-05-24 12:43:12 +01003507 STATIC_ASSERT(kSeqStringTag == 0);
3508 __ tst(result, Operand(kStringRepresentationMask));
3509 __ b(ne, deferred->entry());
3510
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003511 // Dispatch on the encoding: ASCII or two-byte.
3512 Label ascii_string;
Ben Murdoch589d6972011-11-30 16:04:58 +00003513 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
3514 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
Steve Block1e0659c2011-05-24 12:43:12 +01003515 __ tst(result, Operand(kStringEncodingMask));
3516 __ b(ne, &ascii_string);
3517
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003518 // Two-byte string.
3519 // Load the two-byte character code into the result register.
3520 Label done;
3521 __ add(result,
3522 string,
3523 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3524 __ ldrh(result, MemOperand(result, index, LSL, 1));
Steve Block1e0659c2011-05-24 12:43:12 +01003525 __ jmp(&done);
3526
3527 // ASCII string.
3528 // Load the byte into the result register.
3529 __ bind(&ascii_string);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003530 __ add(result,
3531 string,
3532 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3533 __ ldrb(result, MemOperand(result, index));
3534
Steve Block1e0659c2011-05-24 12:43:12 +01003535 __ bind(&done);
3536 __ bind(deferred->exit());
3537}
3538
3539
// Deferred (slow) path for String.charCodeAt: calls the
// Runtime::kStringCharCodeAt function with the string and smi-tagged
// index, untags the smi result and stores it into the result's
// safepoint register slot.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  __ SmiUntag(r0);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3570
3571
Steve Block44f0eee2011-05-26 01:26:41 +01003572void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3573 class DeferredStringCharFromCode: public LDeferredCode {
3574 public:
3575 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3576 : LDeferredCode(codegen), instr_(instr) { }
3577 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3578 private:
3579 LStringCharFromCode* instr_;
3580 };
3581
3582 DeferredStringCharFromCode* deferred =
3583 new DeferredStringCharFromCode(this, instr);
3584
3585 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3586 Register char_code = ToRegister(instr->char_code());
3587 Register result = ToRegister(instr->result());
3588 ASSERT(!char_code.is(result));
3589
3590 __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
3591 __ b(hi, deferred->entry());
3592 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3593 __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
3594 __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
3595 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3596 __ cmp(result, ip);
3597 __ b(eq, deferred->entry());
3598 __ bind(deferred->exit());
3599}
3600
3601
// Deferred (slow) path for DoStringCharFromCode: calls the
// Runtime::kCharFromCode function with the smi-tagged character code and
// stores the resulting string into the result's safepoint register slot.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  // The runtime expects a smi-tagged character code argument.
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3617
3618
Steve Block1e0659c2011-05-24 12:43:12 +01003619void LCodeGen::DoStringLength(LStringLength* instr) {
3620 Register string = ToRegister(instr->InputAt(0));
3621 Register result = ToRegister(instr->result());
3622 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3623}
3624
3625
Ben Murdochb0fe1622011-05-05 13:52:32 +01003626void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003627 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003628 ASSERT(input->IsRegister() || input->IsStackSlot());
3629 LOperand* output = instr->result();
3630 ASSERT(output->IsDoubleRegister());
3631 SwVfpRegister single_scratch = double_scratch0().low();
3632 if (input->IsStackSlot()) {
3633 Register scratch = scratch0();
3634 __ ldr(scratch, ToMemOperand(input));
3635 __ vmov(single_scratch, scratch);
3636 } else {
3637 __ vmov(single_scratch, ToRegister(input));
3638 }
3639 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003640}
3641
3642
// Tags an int32 as a smi in place. If the value does not fit in a smi,
// SmiTag overflows (V flag set) and deferred code allocates a heap number.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  // SmiTag shifts left by one; overflow (vs) means the value needs a
  // heap-number box instead.
  __ SmiTag(reg, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}
3662
3663
// Deferred (slow) path for DoNumberTagI: boxes an int32 that overflowed
// smi tagging into a freshly allocated heap number, preserving all
// registers across the allocation.
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  DoubleRegister dbl_scratch = double_scratch0();
  SwVfpRegister flt_scratch = dbl_scratch.low();

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  // Recover the original int32: undo the smi shift and flip the sign bit
  // that was corrupted by the overflowing left shift.
  __ SmiUntag(reg);
  __ eor(reg, reg, Operand(0x80000000));
  // Convert the recovered integer to a double in dbl_scratch.
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(ip, Operand(0));
  __ StoreToSafepointRegisterSlot(ip, reg);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  __ StoreToSafepointRegisterSlot(reg, reg);
}
3706
3707
// Boxes a double value into a new heap number. The fast path allocates
// inline in new space; the deferred path calls the runtime allocator.
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double value into the freshly allocated heap number.
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
}
3735
3736
// Deferred (slow) path for DoNumberTagD: allocates a heap number through
// the runtime; the caller stores the double value into it afterwards.
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ StoreToSafepointRegisterSlot(r0, reg);
}
3748
3749
3750void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003751 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003752 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3753 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3754 __ SmiTag(ToRegister(input));
3755}
3756
3757
// Untags a smi in place. When a check is required, a set carry flag after
// SmiUntag indicates the input was a heap object and triggers a deopt.
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  if (instr->needs_check()) {
    STATIC_ASSERT(kHeapObjectTag == 1);
    // If the input is a HeapObject, SmiUntag will set the carry flag.
    __ SmiUntag(ToRegister(input), SetCC);
    DeoptimizeIf(cs, instr->environment());
  } else {
    __ SmiUntag(ToRegister(input));
  }
}
3770
3771
// Converts a tagged number in input_reg (smi or heap number) into
// result_reg as a double. If deoptimize_on_undefined is false, undefined
// converts to NaN; any other non-number deoptimizes via |env|.
// input_reg is preserved (smis are untagged and retagged).
void LCodeGen::EmitNumberUntagD(Register input_reg,
                                DoubleRegister result_reg,
                                bool deoptimize_on_undefined,
                                LEnvironment* env) {
  Register scratch = scratch0();
  SwVfpRegister flt_scratch = double_scratch0().low();
  ASSERT(!result_reg.is(double_scratch0()));

  // NOTE(review): the outer heap_number label is never bound or jumped to;
  // the one inside the else branch below shadows it.
  Label load_smi, heap_number, done;

  // Smi check.
  __ JumpIfSmi(input_reg, &load_smi);

  // Heap number map check.
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  if (deoptimize_on_undefined) {
    DeoptimizeIf(ne, env);
  } else {
    Label heap_number;
    __ b(eq, &heap_number);

    // Only undefined is accepted besides heap numbers.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, env);

    // Convert undefined to NaN.
    __ LoadRoot(ip, Heap::kNanValueRootIndex);
    __ sub(ip, ip, Operand(kHeapObjectTag));
    __ vldr(result_reg, ip, HeapNumber::kValueOffset);
    __ jmp(&done);

    __ bind(&heap_number);
  }
  // Heap number to double register conversion.
  __ sub(ip, input_reg, Operand(kHeapObjectTag));
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
  __ jmp(&done);

  // Smi to double register conversion
  __ bind(&load_smi);
  __ SmiUntag(input_reg);  // Untag smi before converting to float.
  __ vmov(flt_scratch, input_reg);
  __ vcvt_f64_s32(result_reg, flt_scratch);
  __ SmiTag(input_reg);  // Retag smi.
  __ bind(&done);
}
3820
3821
// Deferred code object for LTaggedToI: dispatches to
// LCodeGen::DoDeferredTaggedToI, which handles heap-object inputs.
class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};
3830
3831
// Deferred (slow) path for DoTaggedToI: the input was not a smi. Restores
// the optimistically removed tag, then converts a heap number to int32.
// Truncating conversions (JS bitwise ops) also accept undefined (-> 0);
// non-truncating conversions deoptimize on any loss of precision or on -0
// when kBailoutOnMinusZero is set.
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_scratch = double_scratch0();
  SwVfpRegister single_scratch = double_scratch.low();

  ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
  ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));

  Label done;

  // The input was optimistically untagged; revert it.
  // The carry flag is set when we reach this deferred code as we just executed
  // SmiUntag(heap_object, SetCC)
  STATIC_ASSERT(kHeapObjectTag == 1);
  // adc computes 2 * input + carry(=1), i.e. re-tags the heap pointer.
  __ adc(input_reg, input_reg, Operand(input_reg));

  // Heap number map check.
  __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, Operand(ip));

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
    ASSERT(!scratch3.is(input_reg) &&
           !scratch3.is(scratch1) &&
           !scratch3.is(scratch2));
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    // Load the double value and truncate it per ECMA semantics.
    __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);

    __ EmitECMATruncate(input_reg,
                        double_scratch2,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);

  } else {
    CpuFeatures::Scope scope(VFP3);
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
    // Round toward zero; an inexact conversion means precision loss and
    // therefore a deopt.
    __ EmitVFPTruncate(kRoundToZero,
                       single_scratch,
                       double_scratch,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    DeoptimizeIf(ne, instr->environment());
    // Load the result.
    __ vmov(input_reg, single_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result with the double's sign bit set means -0; deoptimize.
      __ cmp(input_reg, Operand(0));
      __ b(ne, &done);
      __ vmov(scratch1, double_scratch.high());
      __ tst(scratch1, Operand(HeapNumber::kSignMask));
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}
3911
3912
// Converts a tagged value to an int32 in place. The smi case is handled
// inline by optimistically untagging; heap objects fall through to the
// deferred path, which restores the tag and converts heap numbers.
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Optimistically untag the input.
  // If the input is a HeapObject, SmiUntag will set the carry flag.
  __ SmiUntag(input_reg, SetCC);
  // Branch to deferred code if the input was tagged.
  // The deferred code will take care of restoring the tag.
  __ b(cs, deferred->entry());
  __ bind(deferred->exit());
}
3930
3931
3932void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003933 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003934 ASSERT(input->IsRegister());
3935 LOperand* result = instr->result();
3936 ASSERT(result->IsDoubleRegister());
3937
3938 Register input_reg = ToRegister(input);
3939 DoubleRegister result_reg = ToDoubleRegister(result);
3940
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003941 EmitNumberUntagD(input_reg, result_reg,
3942 instr->hydrogen()->deoptimize_on_undefined(),
3943 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003944}
3945
3946
// Converts a double to an int32. Truncating conversions use the ECMA
// truncation helper; otherwise an exact VFP conversion is required and any
// inexact/invalid result deoptimizes.
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  Register result_reg = ToRegister(instr->result());
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
  SwVfpRegister single_scratch = double_scratch0().low();

  Label done;  // NOTE(review): bound below but never jumped to.

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    __ EmitECMATruncate(result_reg,
                        double_input,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);
  } else {
    VFPRoundingMode rounding_mode = kRoundToMinusInf;
    __ EmitVFPTruncate(rounding_mode,
                       single_scratch,
                       double_input,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    // Deoptimize if we had a vfp invalid exception,
    // including inexact operation.
    DeoptimizeIf(ne, instr->environment());
    // Retrieve the result.
    __ vmov(result_reg, single_scratch);
  }
  __ bind(&done);
}
3980
3981
3982void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003983 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003984 __ tst(ToRegister(input), Operand(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003985 DeoptimizeIf(ne, instr->environment());
3986}
3987
3988
3989void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
3990 LOperand* input = instr->InputAt(0);
3991 __ tst(ToRegister(input), Operand(kSmiTagMask));
3992 DeoptimizeIf(eq, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003993}
3994
3995
// Deoptimizes unless the input object's instance type satisfies the
// hydrogen check: either a [first, last] interval test or a mask/tag test
// against the map's instance-type byte.
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // scratch = input->map()->instance_type().
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmp(scratch, Operand(first));

    // If there is only one type in the interval check for equality.
    if (first == last) {
      DeoptimizeIf(ne, instr->environment());
    } else {
      // Below the interval -> deopt.
      DeoptimizeIf(lo, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmp(scratch, Operand(last));
        DeoptimizeIf(hi, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      // Single-bit mask: a tst suffices; the expected tag decides which
      // flag state deoptimizes.
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ tst(scratch, Operand(mask));
      DeoptimizeIf(tag == 0 ? ne : eq, instr->environment());
    } else {
      __ and_(scratch, scratch, Operand(mask));
      __ cmp(scratch, Operand(tag));
      DeoptimizeIf(ne, instr->environment());
    }
  }
}
4037
4038
4039void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004040 ASSERT(instr->InputAt(0)->IsRegister());
4041 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004042 __ cmp(reg, Operand(instr->hydrogen()->target()));
4043 DeoptimizeIf(ne, instr->environment());
4044}
4045
4046
4047void LCodeGen::DoCheckMap(LCheckMap* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01004048 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01004049 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004050 ASSERT(input->IsRegister());
4051 Register reg = ToRegister(input);
Steve Block9fac8402011-05-12 15:51:54 +01004052 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
4053 __ cmp(scratch, Operand(instr->hydrogen()->map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004054 DeoptimizeIf(ne, instr->environment());
4055}
4056
4057
Ben Murdoch257744e2011-11-30 15:57:28 +00004058void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4059 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
4060 Register result_reg = ToRegister(instr->result());
4061 DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
4062 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
4063}
4064
4065
4066void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4067 Register unclamped_reg = ToRegister(instr->unclamped());
4068 Register result_reg = ToRegister(instr->result());
4069 __ ClampUint8(result_reg, unclamped_reg);
4070}
4071
4072
// Clamps a tagged value to the uint8 range. Smis and heap numbers are
// clamped; undefined becomes zero; any other value deoptimizes.
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  Register scratch = scratch0();
  Register input_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  Label is_smi, done, heap_number;

  // Both smi and heap number cases are handled.
  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(factory()->heap_number_map()));
  __ b(eq, &heap_number);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, Operand(factory()->undefined_value()));
  DeoptimizeIf(ne, instr->environment());
  __ mov(result_reg, Operand(0));
  __ jmp(&done);

  // Heap number
  __ bind(&heap_number);
  __ vldr(double_scratch0(), FieldMemOperand(input_reg,
                                             HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
  __ jmp(&done);

  // smi
  __ bind(&is_smi);
  __ SmiUntag(result_reg, input_reg);
  __ ClampUint8(result_reg, result_reg);

  __ bind(&done);
}
4109
4110
Ben Murdochb8e0da22011-05-16 14:20:40 +01004111void LCodeGen::LoadHeapObject(Register result,
4112 Handle<HeapObject> object) {
Steve Block44f0eee2011-05-26 01:26:41 +01004113 if (heap()->InNewSpace(*object)) {
Steve Block9fac8402011-05-12 15:51:54 +01004114 Handle<JSGlobalPropertyCell> cell =
Steve Block44f0eee2011-05-26 01:26:41 +01004115 factory()->NewJSGlobalPropertyCell(object);
Steve Block9fac8402011-05-12 15:51:54 +01004116 __ mov(result, Operand(cell));
Ben Murdochb8e0da22011-05-16 14:20:40 +01004117 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
Steve Block9fac8402011-05-12 15:51:54 +01004118 } else {
Ben Murdochb8e0da22011-05-16 14:20:40 +01004119 __ mov(result, Operand(object));
Steve Block9fac8402011-05-12 15:51:54 +01004120 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004121}
4122
4123
// Walks the prototype chain from instr->prototype() to instr->holder(),
// deoptimizing if any map along the chain (including the holder's)
// differs from the map recorded at compile time.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}
4150
4151
// Creates an array literal. Pushes (literals array, literal index,
// constant elements) and dispatches to a fast-clone stub or a runtime
// function depending on literal depth, length, and copy-on-write status.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals must go through the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4178
4179
// Creates an object literal by calling the runtime with (literals array,
// literal index, constant properties, fast-elements flag); shallow
// literals use the cheaper kCreateObjectLiteralShallow entry.
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}
4195
4196
// Converts the object in r0 to fast-properties mode via the runtime.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
4202
4203
// Creates a regexp literal. Materializes the literal via the runtime if
// the literals-array slot is still undefined, then allocates a fresh
// JSRegExp object and copies the materialized literal into it field by
// field (regexp literals produce a new object on each evaluation).
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Inline allocation failed: allocate through the runtime, preserving the
  // materialized literal (r1) across the call.
  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  // Copy the trailing word when the size is an odd number of pointers.
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
4257
4258
// Creates a closure for a function literal. Non-pretenured functions
// without literals use the FastNewClosureStub; otherwise the runtime's
// kNewClosure is called with (context, shared info, pretenure flag).
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
4279
4280
4281void LCodeGen::DoTypeof(LTypeof* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004282 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004283 __ push(input);
4284 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004285}
4286
4287
// Emits the comparison for "typeof x == 'literal'" and branches to the
// instruction's true/false blocks; EmitTypeofIs produces the compare and
// returns the condition for the final branch.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}
4302
4303
// Emits the compare sequence for "typeof input == type_name".  The emitted
// code may jump directly to true_label or false_label; otherwise the
// returned condition code holds exactly when the comparison is true, and
// the caller is responsible for emitting the final branch on it.
// NOTE: the sequence may clobber |input| (several branches load the map
// into it) and uses ip and scratch0() as scratch registers.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis are numbers; otherwise the object's map must be the heap
    // number map.
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    // Non-smi with a string instance type, and not undetectable.
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // Only the true and false oddballs have typeof "boolean".
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    // Under harmony typeof semantics, typeof null is "null".
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // Any callable spec object (instance type at or above the first
    // callable type) has typeof "function".
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch,
                         FIRST_CALLABLE_SPEC_OBJECT_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
      // Classic semantics: typeof null is "object".
      __ CompareRoot(input, Heap::kNullValueRootIndex);
      __ b(eq, true_label);
    }
    // Instance type must be within the non-callable spec-object range.
    __ CompareObjectType(input, input, scratch,
                         FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, false_label);
    __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    // Unknown type literal: the comparison can never be true.
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
4375
4376
Steve Block1e0659c2011-05-24 12:43:12 +01004377void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4378 Register temp1 = ToRegister(instr->TempAt(0));
4379 int true_block = chunk_->LookupDestination(instr->true_block_id());
4380 int false_block = chunk_->LookupDestination(instr->false_block_id());
4381
4382 EmitIsConstructCall(temp1, scratch0());
4383 EmitBranch(true_block, false_block, eq);
4384}
4385
4386
// Walks up to the calling frame (skipping an arguments-adaptor frame if one
// is present) and compares its frame marker against the CONSTRUCT marker.
// On exit the condition flags are set so that eq holds iff the calling
// frame is a construct frame.  Both temp registers are clobbered.
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4404
4405
// Intentionally emits no code.  This pseudo-instruction exists only to
// capture the environment after a call, so the safepoint data can be
// populated with deoptimization information.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
4410
4411
// Unconditional deoptimization: the 'al' (always) condition makes
// DeoptimizeIf bail out to the instruction's environment every time this
// point is reached.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}
4415
4416
// Implements 'delete object[key]' by invoking the DELETE builtin.
// Pushes object, key and the current strict-mode flag (as a smi) as
// arguments, and registers the environment so the call site can lazily
// deoptimize.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // The safepoint generator records the safepoint when the builtin call
  // returns.
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
4433
4434
// Implements the 'key in object' operator by invoking the IN builtin.
// Mirrors DoDeleteProperty above: push the arguments (note the key is
// pushed first here), register the environment for lazy deoptimization,
// and let the safepoint generator record the safepoint after the call.
void LCodeGen::DoIn(LIn* instr) {
  Register obj = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(key, obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
4449
4450
// Out-of-line (deferred) part of the stack check: calls the StackGuard
// runtime function with all registers saved, then records the pc after the
// register-restoring gap code for the safepoint table.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  {
    // Scope saves/restores the safepoint registers around the runtime call.
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
    __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
    RegisterLazyDeoptimization(
        instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  }

  // The gap code includes the restoring of the safepoint registers.
  int pc = masm()->pc_offset();
  safepoints_.SetPcAfterGap(pc);
}
4463
4464
// Emits a stack-limit check.  At function entry an overflow calls the
// StackCheckStub inline; at a backwards branch the slow path is emitted
// out-of-line as deferred code (DoDeferredStackCheck) so the common case
// falls through with just a compare and a conditional branch.
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Deferred slow path: forwards to DoDeferredStackCheck above.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LStackCheck* instr_;
  };

  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &done);
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    __ bind(&done);
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(lo, deferred_stack_check->entry());
    // The deferred code returns to done_label after the slow path.
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
  }
}
4496
4497
// On-stack-replacement entry point.  Emits no instructions; it only
// registers the environment (with spilled-register information attached)
// for deoptimization and records the assembler's current pc offset as the
// OSR entry offset.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  // Only one OSR entry per function is expected.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
4513
4514
Ben Murdoch257744e2011-11-30 15:57:28 +00004515
4516
Ben Murdochb0fe1622011-05-05 13:52:32 +01004517#undef __
4518
4519} } // namespace v8::internal