blob: 4a201ab987be6009ba133e7a0f5745f86603c8f0 [file] [log] [blame]
Ben Murdochb8e0da22011-05-16 14:20:40 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Steve Block44f0eee2011-05-26 01:26:41 +010028#include "v8.h"
29
Ben Murdochb0fe1622011-05-05 13:52:32 +010030#include "arm/lithium-codegen-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +010031#include "arm/lithium-gap-resolver-arm.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "code-stubs.h"
33#include "stub-cache.h"
34
35namespace v8 {
36namespace internal {
37
38
Steve Block44f0eee2011-05-26 01:26:41 +010039class SafepointGenerator : public CallWrapper {
Ben Murdochb0fe1622011-05-05 13:52:32 +010040 public:
41 SafepointGenerator(LCodeGen* codegen,
42 LPointerMap* pointers,
Ben Murdoch2b4ba112012-01-20 14:57:15 +000043 Safepoint::DeoptMode mode)
Ben Murdochb0fe1622011-05-05 13:52:32 +010044 : codegen_(codegen),
45 pointers_(pointers),
Ben Murdoch2b4ba112012-01-20 14:57:15 +000046 deopt_mode_(mode) { }
Ben Murdochb0fe1622011-05-05 13:52:32 +010047 virtual ~SafepointGenerator() { }
48
Ben Murdoch2b4ba112012-01-20 14:57:15 +000049 virtual void BeforeCall(int call_size) const { }
Steve Block44f0eee2011-05-26 01:26:41 +010050
Ben Murdoch257744e2011-11-30 15:57:28 +000051 virtual void AfterCall() const {
Ben Murdoch2b4ba112012-01-20 14:57:15 +000052 codegen_->RecordSafepoint(pointers_, deopt_mode_);
Ben Murdochb0fe1622011-05-05 13:52:32 +010053 }
54
55 private:
56 LCodeGen* codegen_;
57 LPointerMap* pointers_;
Ben Murdoch2b4ba112012-01-20 14:57:15 +000058 Safepoint::DeoptMode deopt_mode_;
Ben Murdochb0fe1622011-05-05 13:52:32 +010059};
60
61
62#define __ masm()->
63
64bool LCodeGen::GenerateCode() {
65 HPhase phase("Code generation", chunk());
66 ASSERT(is_unused());
67 status_ = GENERATING;
68 CpuFeatures::Scope scope1(VFP3);
69 CpuFeatures::Scope scope2(ARMv7);
70 return GeneratePrologue() &&
71 GenerateBody() &&
72 GenerateDeferredCode() &&
Ben Murdoch257744e2011-11-30 15:57:28 +000073 GenerateDeoptJumpTable() &&
Ben Murdochb0fe1622011-05-05 13:52:32 +010074 GenerateSafepointTable();
75}
76
77
78void LCodeGen::FinishCode(Handle<Code> code) {
79 ASSERT(is_done());
Ben Murdoch257744e2011-11-30 15:57:28 +000080 code->set_stack_slots(GetStackSlotCount());
Steve Block1e0659c2011-05-24 12:43:12 +010081 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
Ben Murdochb0fe1622011-05-05 13:52:32 +010082 PopulateDeoptimizationData(code);
83}
84
85
86void LCodeGen::Abort(const char* format, ...) {
87 if (FLAG_trace_bailout) {
Ben Murdoch589d6972011-11-30 16:04:58 +000088 SmartArrayPointer<char> name(
89 info()->shared_info()->DebugName()->ToCString());
Ben Murdoche0cee9b2011-05-25 10:26:03 +010090 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
Ben Murdochb0fe1622011-05-05 13:52:32 +010091 va_list arguments;
92 va_start(arguments, format);
93 OS::VPrint(format, arguments);
94 va_end(arguments);
95 PrintF("\n");
96 }
97 status_ = ABORTED;
98}
99
100
101void LCodeGen::Comment(const char* format, ...) {
102 if (!FLAG_code_comments) return;
103 char buffer[4 * KB];
104 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
105 va_list arguments;
106 va_start(arguments, format);
107 builder.AddFormattedList(format, arguments);
108 va_end(arguments);
109
110 // Copy the string before recording it in the assembler to avoid
111 // issues when the stack allocated buffer goes out of scope.
112 size_t length = builder.position();
113 Vector<char> copy = Vector<char>::New(length + 1);
114 memcpy(copy.start(), builder.Finalize(), copy.length());
115 masm()->RecordComment(copy.start());
116}
117
118
119bool LCodeGen::GeneratePrologue() {
120 ASSERT(is_generating());
121
122#ifdef DEBUG
123 if (strlen(FLAG_stop_at) > 0 &&
124 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
125 __ stop("stop_at");
126 }
127#endif
128
129 // r1: Callee's JS function.
130 // cp: Callee's context.
131 // fp: Caller's frame pointer.
132 // lr: Caller's pc.
133
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000134 // Strict mode functions and builtins need to replace the receiver
135 // with undefined when called as functions (without an explicit
136 // receiver object). r5 is zero for method calls and non-zero for
137 // function calls.
138 if (info_->is_strict_mode() || info_->is_native()) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000139 Label ok;
140 __ cmp(r5, Operand(0));
141 __ b(eq, &ok);
142 int receiver_offset = scope()->num_parameters() * kPointerSize;
143 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
144 __ str(r2, MemOperand(sp, receiver_offset));
145 __ bind(&ok);
146 }
147
Ben Murdochb0fe1622011-05-05 13:52:32 +0100148 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
149 __ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.
150
151 // Reserve space for the stack slots needed by the code.
Ben Murdoch257744e2011-11-30 15:57:28 +0000152 int slots = GetStackSlotCount();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100153 if (slots > 0) {
154 if (FLAG_debug_code) {
155 __ mov(r0, Operand(slots));
156 __ mov(r2, Operand(kSlotsZapValue));
157 Label loop;
158 __ bind(&loop);
159 __ push(r2);
160 __ sub(r0, r0, Operand(1), SetCC);
161 __ b(ne, &loop);
162 } else {
163 __ sub(sp, sp, Operand(slots * kPointerSize));
164 }
165 }
166
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100167 // Possibly allocate a local context.
168 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
169 if (heap_slots > 0) {
170 Comment(";;; Allocate local context");
171 // Argument to NewContext is the function, which is in r1.
172 __ push(r1);
173 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
174 FastNewContextStub stub(heap_slots);
175 __ CallStub(&stub);
176 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000177 __ CallRuntime(Runtime::kNewFunctionContext, 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100178 }
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000179 RecordSafepoint(Safepoint::kNoLazyDeopt);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100180 // Context is returned in both r0 and cp. It replaces the context
181 // passed to us. It's saved in the stack and kept live in cp.
182 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
183 // Copy any necessary parameters into the context.
184 int num_parameters = scope()->num_parameters();
185 for (int i = 0; i < num_parameters; i++) {
Ben Murdoch589d6972011-11-30 16:04:58 +0000186 Variable* var = scope()->parameter(i);
187 if (var->IsContextSlot()) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100188 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
189 (num_parameters - 1 - i) * kPointerSize;
190 // Load parameter from stack.
191 __ ldr(r0, MemOperand(fp, parameter_offset));
192 // Store it in the context.
Ben Murdoch589d6972011-11-30 16:04:58 +0000193 __ mov(r1, Operand(Context::SlotOffset(var->index())));
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100194 __ str(r0, MemOperand(cp, r1));
195 // Update the write barrier. This clobbers all involved
196 // registers, so we have to use two more registers to avoid
197 // clobbering cp.
198 __ mov(r2, Operand(cp));
199 __ RecordWrite(r2, Operand(r1), r3, r0);
200 }
201 }
202 Comment(";;; End allocate local context");
203 }
204
Ben Murdochb0fe1622011-05-05 13:52:32 +0100205 // Trace the call.
206 if (FLAG_trace) {
207 __ CallRuntime(Runtime::kTraceEnter, 0);
208 }
209 return !is_aborted();
210}
211
212
213bool LCodeGen::GenerateBody() {
214 ASSERT(is_generating());
215 bool emit_instructions = true;
216 for (current_instruction_ = 0;
217 !is_aborted() && current_instruction_ < instructions_->length();
218 current_instruction_++) {
219 LInstruction* instr = instructions_->at(current_instruction_);
220 if (instr->IsLabel()) {
221 LLabel* label = LLabel::cast(instr);
222 emit_instructions = !label->HasReplacement();
223 }
224
225 if (emit_instructions) {
226 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
227 instr->CompileToNative(this);
228 }
229 }
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000230 EnsureSpaceForLazyDeopt();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100231 return !is_aborted();
232}
233
234
Ben Murdochb0fe1622011-05-05 13:52:32 +0100235bool LCodeGen::GenerateDeferredCode() {
236 ASSERT(is_generating());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000237 if (deferred_.length() > 0) {
238 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
239 LDeferredCode* code = deferred_[i];
240 __ bind(code->entry());
241 code->Generate();
242 __ jmp(code->exit());
243 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100244 }
245
Ben Murdoch257744e2011-11-30 15:57:28 +0000246 // Force constant pool emission at the end of the deferred code to make
247 // sure that no constant pools are emitted after.
Ben Murdochb8e0da22011-05-16 14:20:40 +0100248 masm()->CheckConstPool(true, false);
249
Ben Murdoch257744e2011-11-30 15:57:28 +0000250 return !is_aborted();
251}
252
253
254bool LCodeGen::GenerateDeoptJumpTable() {
255 // Check that the jump table is accessible from everywhere in the function
256 // code, ie that offsets to the table can be encoded in the 24bit signed
257 // immediate of a branch instruction.
258 // To simplify we consider the code size from the first instruction to the
259 // end of the jump table. We also don't consider the pc load delta.
260 // Each entry in the jump table generates one instruction and inlines one
261 // 32bit data after it.
262 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
263 deopt_jump_table_.length() * 2)) {
264 Abort("Generated code is too large");
265 }
266
267 // Block the constant pool emission during the jump table emission.
268 __ BlockConstPoolFor(deopt_jump_table_.length());
269 __ RecordComment("[ Deoptimisation jump table");
270 Label table_start;
271 __ bind(&table_start);
272 for (int i = 0; i < deopt_jump_table_.length(); i++) {
273 __ bind(&deopt_jump_table_[i].label);
274 __ ldr(pc, MemOperand(pc, Assembler::kInstrSize - Assembler::kPcLoadDelta));
275 __ dd(reinterpret_cast<uint32_t>(deopt_jump_table_[i].address));
276 }
277 ASSERT(masm()->InstructionsGeneratedSince(&table_start) ==
278 deopt_jump_table_.length() * 2);
279 __ RecordComment("]");
280
281 // The deoptimization jump table is the last part of the instruction
282 // sequence. Mark the generated code as done unless we bailed out.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100283 if (!is_aborted()) status_ = DONE;
284 return !is_aborted();
285}
286
287
288bool LCodeGen::GenerateSafepointTable() {
289 ASSERT(is_done());
Ben Murdoch257744e2011-11-30 15:57:28 +0000290 safepoints_.Emit(masm(), GetStackSlotCount());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100291 return !is_aborted();
292}
293
294
295Register LCodeGen::ToRegister(int index) const {
296 return Register::FromAllocationIndex(index);
297}
298
299
300DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
301 return DoubleRegister::FromAllocationIndex(index);
302}
303
304
305Register LCodeGen::ToRegister(LOperand* op) const {
306 ASSERT(op->IsRegister());
307 return ToRegister(op->index());
308}
309
310
311Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
312 if (op->IsRegister()) {
313 return ToRegister(op->index());
314 } else if (op->IsConstantOperand()) {
315 __ mov(scratch, ToOperand(op));
316 return scratch;
317 } else if (op->IsStackSlot() || op->IsArgument()) {
318 __ ldr(scratch, ToMemOperand(op));
319 return scratch;
320 }
321 UNREACHABLE();
322 return scratch;
323}
324
325
326DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
327 ASSERT(op->IsDoubleRegister());
328 return ToDoubleRegister(op->index());
329}
330
331
332DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
333 SwVfpRegister flt_scratch,
334 DoubleRegister dbl_scratch) {
335 if (op->IsDoubleRegister()) {
336 return ToDoubleRegister(op->index());
337 } else if (op->IsConstantOperand()) {
338 LConstantOperand* const_op = LConstantOperand::cast(op);
339 Handle<Object> literal = chunk_->LookupLiteral(const_op);
340 Representation r = chunk_->LookupLiteralRepresentation(const_op);
341 if (r.IsInteger32()) {
342 ASSERT(literal->IsNumber());
343 __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
344 __ vmov(flt_scratch, ip);
345 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
346 return dbl_scratch;
347 } else if (r.IsDouble()) {
348 Abort("unsupported double immediate");
349 } else if (r.IsTagged()) {
350 Abort("unsupported tagged immediate");
351 }
352 } else if (op->IsStackSlot() || op->IsArgument()) {
353 // TODO(regis): Why is vldr not taking a MemOperand?
354 // __ vldr(dbl_scratch, ToMemOperand(op));
355 MemOperand mem_op = ToMemOperand(op);
356 __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
357 return dbl_scratch;
358 }
359 UNREACHABLE();
360 return dbl_scratch;
361}
362
363
364int LCodeGen::ToInteger32(LConstantOperand* op) const {
365 Handle<Object> value = chunk_->LookupLiteral(op);
366 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
367 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
368 value->Number());
369 return static_cast<int32_t>(value->Number());
370}
371
372
373Operand LCodeGen::ToOperand(LOperand* op) {
374 if (op->IsConstantOperand()) {
375 LConstantOperand* const_op = LConstantOperand::cast(op);
376 Handle<Object> literal = chunk_->LookupLiteral(const_op);
377 Representation r = chunk_->LookupLiteralRepresentation(const_op);
378 if (r.IsInteger32()) {
379 ASSERT(literal->IsNumber());
380 return Operand(static_cast<int32_t>(literal->Number()));
381 } else if (r.IsDouble()) {
382 Abort("ToOperand Unsupported double immediate.");
383 }
384 ASSERT(r.IsTagged());
385 return Operand(literal);
386 } else if (op->IsRegister()) {
387 return Operand(ToRegister(op));
388 } else if (op->IsDoubleRegister()) {
389 Abort("ToOperand IsDoubleRegister unimplemented");
390 return Operand(0);
391 }
392 // Stack slots not implemented, use ToMemOperand instead.
393 UNREACHABLE();
394 return Operand(0);
395}
396
397
398MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100399 ASSERT(!op->IsRegister());
400 ASSERT(!op->IsDoubleRegister());
401 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
402 int index = op->index();
403 if (index >= 0) {
404 // Local or spill slot. Skip the frame pointer, function, and
405 // context in the fixed part of the frame.
406 return MemOperand(fp, -(index + 3) * kPointerSize);
407 } else {
408 // Incoming parameter. Skip the return address.
409 return MemOperand(fp, -(index - 1) * kPointerSize);
410 }
411}
412
413
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100414MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
415 ASSERT(op->IsDoubleStackSlot());
416 int index = op->index();
417 if (index >= 0) {
418 // Local or spill slot. Skip the frame pointer, function, context,
419 // and the first word of the double in the fixed part of the frame.
420 return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
421 } else {
422 // Incoming parameter. Skip the return address and the first word of
423 // the double.
424 return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
425 }
426}
427
428
Ben Murdochb8e0da22011-05-16 14:20:40 +0100429void LCodeGen::WriteTranslation(LEnvironment* environment,
430 Translation* translation) {
431 if (environment == NULL) return;
432
433 // The translation includes one command per value in the environment.
434 int translation_size = environment->values()->length();
435 // The output frame height does not include the parameters.
436 int height = translation_size - environment->parameter_count();
437
438 WriteTranslation(environment->outer(), translation);
439 int closure_id = DefineDeoptimizationLiteral(environment->closure());
440 translation->BeginFrame(environment->ast_id(), closure_id, height);
441 for (int i = 0; i < translation_size; ++i) {
442 LOperand* value = environment->values()->at(i);
443 // spilled_registers_ and spilled_double_registers_ are either
444 // both NULL or both set.
445 if (environment->spilled_registers() != NULL && value != NULL) {
446 if (value->IsRegister() &&
447 environment->spilled_registers()[value->index()] != NULL) {
448 translation->MarkDuplicate();
449 AddToTranslation(translation,
450 environment->spilled_registers()[value->index()],
451 environment->HasTaggedValueAt(i));
452 } else if (
453 value->IsDoubleRegister() &&
454 environment->spilled_double_registers()[value->index()] != NULL) {
455 translation->MarkDuplicate();
456 AddToTranslation(
457 translation,
458 environment->spilled_double_registers()[value->index()],
459 false);
460 }
461 }
462
463 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
464 }
465}
466
467
Ben Murdochb0fe1622011-05-05 13:52:32 +0100468void LCodeGen::AddToTranslation(Translation* translation,
469 LOperand* op,
470 bool is_tagged) {
471 if (op == NULL) {
472 // TODO(twuerthinger): Introduce marker operands to indicate that this value
473 // is not present and must be reconstructed from the deoptimizer. Currently
474 // this is only used for the arguments object.
475 translation->StoreArgumentsObject();
476 } else if (op->IsStackSlot()) {
477 if (is_tagged) {
478 translation->StoreStackSlot(op->index());
479 } else {
480 translation->StoreInt32StackSlot(op->index());
481 }
482 } else if (op->IsDoubleStackSlot()) {
483 translation->StoreDoubleStackSlot(op->index());
484 } else if (op->IsArgument()) {
485 ASSERT(is_tagged);
Ben Murdoch257744e2011-11-30 15:57:28 +0000486 int src_index = GetStackSlotCount() + op->index();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100487 translation->StoreStackSlot(src_index);
488 } else if (op->IsRegister()) {
489 Register reg = ToRegister(op);
490 if (is_tagged) {
491 translation->StoreRegister(reg);
492 } else {
493 translation->StoreInt32Register(reg);
494 }
495 } else if (op->IsDoubleRegister()) {
496 DoubleRegister reg = ToDoubleRegister(op);
497 translation->StoreDoubleRegister(reg);
498 } else if (op->IsConstantOperand()) {
499 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
500 int src_index = DefineDeoptimizationLiteral(literal);
501 translation->StoreLiteral(src_index);
502 } else {
503 UNREACHABLE();
504 }
505}
506
507
508void LCodeGen::CallCode(Handle<Code> code,
509 RelocInfo::Mode mode,
510 LInstruction* instr) {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100511 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
512}
513
514
515void LCodeGen::CallCodeGeneric(Handle<Code> code,
516 RelocInfo::Mode mode,
517 LInstruction* instr,
518 SafepointMode safepoint_mode) {
Steve Block1e0659c2011-05-24 12:43:12 +0100519 ASSERT(instr != NULL);
520 LPointerMap* pointers = instr->pointer_map();
521 RecordPosition(pointers->position());
522 __ Call(code, mode);
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000523 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
Ben Murdoch18a6f572011-07-25 17:16:09 +0100524
525 // Signal that we don't inline smi code before these stubs in the
526 // optimizing code generator.
Ben Murdoch257744e2011-11-30 15:57:28 +0000527 if (code->kind() == Code::BINARY_OP_IC ||
Ben Murdoch18a6f572011-07-25 17:16:09 +0100528 code->kind() == Code::COMPARE_IC) {
529 __ nop();
530 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100531}
532
533
Steve Block44f0eee2011-05-26 01:26:41 +0100534void LCodeGen::CallRuntime(const Runtime::Function* function,
Ben Murdochb0fe1622011-05-05 13:52:32 +0100535 int num_arguments,
536 LInstruction* instr) {
537 ASSERT(instr != NULL);
538 LPointerMap* pointers = instr->pointer_map();
539 ASSERT(pointers != NULL);
540 RecordPosition(pointers->position());
541
542 __ CallRuntime(function, num_arguments);
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000543 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100544}
545
546
Ben Murdoch8b112d22011-06-08 16:22:53 +0100547void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
548 int argc,
549 LInstruction* instr) {
550 __ CallRuntimeSaveDoubles(id);
551 RecordSafepointWithRegisters(
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000552 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100553}
554
555
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000556void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
557 Safepoint::DeoptMode mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100558 if (!environment->HasBeenRegistered()) {
559 // Physical stack frame layout:
560 // -x ............. -4 0 ..................................... y
561 // [incoming arguments] [spill slots] [pushed outgoing arguments]
562
563 // Layout of the environment:
564 // 0 ..................................................... size-1
565 // [parameters] [locals] [expression stack including arguments]
566
567 // Layout of the translation:
568 // 0 ........................................................ size - 1 + 4
569 // [expression stack including arguments] [locals] [4 words] [parameters]
570 // |>------------ translation_size ------------<|
571
572 int frame_count = 0;
573 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
574 ++frame_count;
575 }
576 Translation translation(&translations_, frame_count);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100577 WriteTranslation(environment, &translation);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100578 int deoptimization_index = deoptimizations_.length();
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000579 int pc_offset = masm()->pc_offset();
580 environment->Register(deoptimization_index,
581 translation.index(),
582 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100583 deoptimizations_.Add(environment);
584 }
585}
586
587
588void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000589 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100590 ASSERT(environment->HasBeenRegistered());
591 int id = environment->deoptimization_index();
592 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
593 ASSERT(entry != NULL);
594 if (entry == NULL) {
595 Abort("bailout was not prepared");
596 return;
597 }
598
599 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on ARM.
600
601 if (FLAG_deopt_every_n_times == 1 &&
602 info_->shared_info()->opt_count() == id) {
603 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
604 return;
605 }
606
Ben Murdoch257744e2011-11-30 15:57:28 +0000607 if (FLAG_trap_on_deopt) __ stop("trap_on_deopt", cc);
608
Steve Block1e0659c2011-05-24 12:43:12 +0100609 if (cc == al) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100610 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
611 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +0000612 // We often have several deopts to the same entry, reuse the last
613 // jump entry if this is the case.
614 if (deopt_jump_table_.is_empty() ||
615 (deopt_jump_table_.last().address != entry)) {
616 deopt_jump_table_.Add(JumpTableEntry(entry));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100617 }
Ben Murdoch257744e2011-11-30 15:57:28 +0000618 __ b(cc, &deopt_jump_table_.last().label);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100619 }
620}
621
622
623void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
624 int length = deoptimizations_.length();
625 if (length == 0) return;
626 ASSERT(FLAG_deopt);
627 Handle<DeoptimizationInputData> data =
Steve Block44f0eee2011-05-26 01:26:41 +0100628 factory()->NewDeoptimizationInputData(length, TENURED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100629
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100630 Handle<ByteArray> translations = translations_.CreateByteArray();
631 data->SetTranslationByteArray(*translations);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100632 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
633
634 Handle<FixedArray> literals =
Steve Block44f0eee2011-05-26 01:26:41 +0100635 factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100636 for (int i = 0; i < deoptimization_literals_.length(); i++) {
637 literals->set(i, *deoptimization_literals_[i]);
638 }
639 data->SetLiteralArray(*literals);
640
641 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
642 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
643
644 // Populate the deoptimization entries.
645 for (int i = 0; i < length; i++) {
646 LEnvironment* env = deoptimizations_[i];
647 data->SetAstId(i, Smi::FromInt(env->ast_id()));
648 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
649 data->SetArgumentsStackHeight(i,
650 Smi::FromInt(env->arguments_stack_height()));
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000651 data->SetPc(i, Smi::FromInt(env->pc_offset()));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100652 }
653 code->set_deoptimization_data(*data);
654}
655
656
657int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
658 int result = deoptimization_literals_.length();
659 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
660 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
661 }
662 deoptimization_literals_.Add(literal);
663 return result;
664}
665
666
667void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
668 ASSERT(deoptimization_literals_.length() == 0);
669
670 const ZoneList<Handle<JSFunction> >* inlined_closures =
671 chunk()->inlined_closures();
672
673 for (int i = 0, length = inlined_closures->length();
674 i < length;
675 i++) {
676 DefineDeoptimizationLiteral(inlined_closures->at(i));
677 }
678
679 inlined_function_count_ = deoptimization_literals_.length();
680}
681
682
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000683void LCodeGen::RecordSafepointWithLazyDeopt(
684 LInstruction* instr, SafepointMode safepoint_mode) {
685 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
686 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
687 } else {
688 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
689 RecordSafepointWithRegisters(
690 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
691 }
692}
693
694
Steve Block1e0659c2011-05-24 12:43:12 +0100695void LCodeGen::RecordSafepoint(
696 LPointerMap* pointers,
697 Safepoint::Kind kind,
698 int arguments,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000699 Safepoint::DeoptMode deopt_mode) {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100700 ASSERT(expected_safepoint_kind_ == kind);
701
Ben Murdochb0fe1622011-05-05 13:52:32 +0100702 const ZoneList<LOperand*>* operands = pointers->operands();
703 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000704 kind, arguments, deopt_mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100705 for (int i = 0; i < operands->length(); i++) {
706 LOperand* pointer = operands->at(i);
707 if (pointer->IsStackSlot()) {
708 safepoint.DefinePointerSlot(pointer->index());
Steve Block1e0659c2011-05-24 12:43:12 +0100709 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
710 safepoint.DefinePointerRegister(ToRegister(pointer));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100711 }
712 }
Steve Block1e0659c2011-05-24 12:43:12 +0100713 if (kind & Safepoint::kWithRegisters) {
714 // Register cp always contains a pointer to the context.
715 safepoint.DefinePointerRegister(cp);
716 }
717}
718
719
720void LCodeGen::RecordSafepoint(LPointerMap* pointers,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000721 Safepoint::DeoptMode deopt_mode) {
722 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100723}
724
725
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000726void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100727 LPointerMap empty_pointers(RelocInfo::kNoPosition);
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000728 RecordSafepoint(&empty_pointers, deopt_mode);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100729}
730
731
Ben Murdochb0fe1622011-05-05 13:52:32 +0100732void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
733 int arguments,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000734 Safepoint::DeoptMode deopt_mode) {
735 RecordSafepoint(
736 pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100737}
738
739
Ben Murdochb8e0da22011-05-16 14:20:40 +0100740void LCodeGen::RecordSafepointWithRegistersAndDoubles(
741 LPointerMap* pointers,
742 int arguments,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000743 Safepoint::DeoptMode deopt_mode) {
744 RecordSafepoint(
745 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100746}
747
748
Ben Murdochb0fe1622011-05-05 13:52:32 +0100749void LCodeGen::RecordPosition(int position) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000750 if (position == RelocInfo::kNoPosition) return;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100751 masm()->positions_recorder()->RecordPosition(position);
752}
753
754
755void LCodeGen::DoLabel(LLabel* label) {
756 if (label->is_loop_header()) {
757 Comment(";;; B%d - LOOP entry", label->block_id());
758 } else {
759 Comment(";;; B%d", label->block_id());
760 }
761 __ bind(label->label());
762 current_block_ = label->block_id();
Ben Murdoch257744e2011-11-30 15:57:28 +0000763 DoGap(label);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100764}
765
766
767void LCodeGen::DoParallelMove(LParallelMove* move) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100768 resolver_.Resolve(move);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100769}
770
771
772void LCodeGen::DoGap(LGap* gap) {
773 for (int i = LGap::FIRST_INNER_POSITION;
774 i <= LGap::LAST_INNER_POSITION;
775 i++) {
776 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
777 LParallelMove* move = gap->GetParallelMove(inner_pos);
778 if (move != NULL) DoParallelMove(move);
779 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100780}
781
782
Ben Murdoch257744e2011-11-30 15:57:28 +0000783void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
784 DoGap(instr);
785}
786
787
Ben Murdochb0fe1622011-05-05 13:52:32 +0100788void LCodeGen::DoParameter(LParameter* instr) {
789 // Nothing to do.
790}
791
792
// Calls one of a fixed set of code stubs selected by the hydrogen
// instruction's major key. Every stub returns its result in r0.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // The stub expects its tagged argument in r0; it is on top of the stack.
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
837
838
// OSR (on-stack replacement) values are already in their slots when the
// optimized frame is entered; no code is required.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
842
843
844void LCodeGen::DoModI(LModI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +0100845 if (instr->hydrogen()->HasPowerOf2Divisor()) {
846 Register dividend = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000847 Register result = ToRegister(instr->result());
Steve Block44f0eee2011-05-26 01:26:41 +0100848
849 int32_t divisor =
850 HConstant::cast(instr->hydrogen()->right())->Integer32Value();
851
852 if (divisor < 0) divisor = -divisor;
853
854 Label positive_dividend, done;
855 __ cmp(dividend, Operand(0));
856 __ b(pl, &positive_dividend);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000857 __ rsb(result, dividend, Operand(0));
858 __ and_(result, result, Operand(divisor - 1), SetCC);
Steve Block44f0eee2011-05-26 01:26:41 +0100859 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000860 DeoptimizeIf(eq, instr->environment());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100861 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000862 __ rsb(result, result, Operand(0));
863 __ b(&done);
Steve Block44f0eee2011-05-26 01:26:41 +0100864 __ bind(&positive_dividend);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000865 __ and_(result, dividend, Operand(divisor - 1));
Steve Block44f0eee2011-05-26 01:26:41 +0100866 __ bind(&done);
867 return;
868 }
869
Ben Murdochb8e0da22011-05-16 14:20:40 +0100870 // These registers hold untagged 32 bit values.
Steve Block1e0659c2011-05-24 12:43:12 +0100871 Register left = ToRegister(instr->InputAt(0));
872 Register right = ToRegister(instr->InputAt(1));
Ben Murdochb8e0da22011-05-16 14:20:40 +0100873 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100874
Steve Block44f0eee2011-05-26 01:26:41 +0100875 Register scratch = scratch0();
876 Register scratch2 = ToRegister(instr->TempAt(0));
877 DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
878 DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
879 DwVfpRegister quotient = double_scratch0();
880
Steve Block44f0eee2011-05-26 01:26:41 +0100881 ASSERT(!dividend.is(divisor));
882 ASSERT(!dividend.is(quotient));
883 ASSERT(!divisor.is(quotient));
884 ASSERT(!scratch.is(left));
885 ASSERT(!scratch.is(right));
886 ASSERT(!scratch.is(result));
887
888 Label done, vfp_modulo, both_positive, right_negative;
889
Ben Murdochb8e0da22011-05-16 14:20:40 +0100890 // Check for x % 0.
891 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +0100892 __ cmp(right, Operand(0));
893 DeoptimizeIf(eq, instr->environment());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100894 }
895
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000896 __ Move(result, left);
897
Steve Block44f0eee2011-05-26 01:26:41 +0100898 // (0 % x) must yield 0 (if x is finite, which is the case here).
Steve Block1e0659c2011-05-24 12:43:12 +0100899 __ cmp(left, Operand(0));
Steve Block44f0eee2011-05-26 01:26:41 +0100900 __ b(eq, &done);
901 // Preload right in a vfp register.
902 __ vmov(divisor.low(), right);
903 __ b(lt, &vfp_modulo);
904
905 __ cmp(left, Operand(right));
906 __ b(lt, &done);
907
908 // Check for (positive) power of two on the right hand side.
909 __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
910 scratch,
911 &right_negative,
912 &both_positive);
913 // Perform modulo operation (scratch contains right - 1).
914 __ and_(result, scratch, Operand(left));
915 __ b(&done);
916
917 __ bind(&right_negative);
918 // Negate right. The sign of the divisor does not matter.
919 __ rsb(right, right, Operand(0));
920
921 __ bind(&both_positive);
922 const int kUnfolds = 3;
Steve Block1e0659c2011-05-24 12:43:12 +0100923 // If the right hand side is smaller than the (nonnegative)
Steve Block44f0eee2011-05-26 01:26:41 +0100924 // left hand side, the left hand side is the result.
925 // Else try a few subtractions of the left hand side.
Steve Block1e0659c2011-05-24 12:43:12 +0100926 __ mov(scratch, left);
927 for (int i = 0; i < kUnfolds; i++) {
928 // Check if the left hand side is less or equal than the
929 // the right hand side.
Steve Block44f0eee2011-05-26 01:26:41 +0100930 __ cmp(scratch, Operand(right));
Steve Block1e0659c2011-05-24 12:43:12 +0100931 __ mov(result, scratch, LeaveCC, lt);
932 __ b(lt, &done);
933 // If not, reduce the left hand side by the right hand
934 // side and check again.
935 if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
936 }
937
Steve Block44f0eee2011-05-26 01:26:41 +0100938 __ bind(&vfp_modulo);
939 // Load the arguments in VFP registers.
940 // The divisor value is preloaded before. Be careful that 'right' is only live
941 // on entry.
942 __ vmov(dividend.low(), left);
943 // From here on don't use right as it may have been reallocated (for example
944 // to scratch2).
945 right = no_reg;
Steve Block1e0659c2011-05-24 12:43:12 +0100946
Steve Block44f0eee2011-05-26 01:26:41 +0100947 __ vcvt_f64_s32(dividend, dividend.low());
948 __ vcvt_f64_s32(divisor, divisor.low());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100949
Steve Block44f0eee2011-05-26 01:26:41 +0100950 // We do not care about the sign of the divisor.
951 __ vabs(divisor, divisor);
952 // Compute the quotient and round it to a 32bit integer.
953 __ vdiv(quotient, dividend, divisor);
954 __ vcvt_s32_f64(quotient.low(), quotient);
955 __ vcvt_f64_s32(quotient, quotient.low());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100956
Steve Block44f0eee2011-05-26 01:26:41 +0100957 // Compute the remainder in result.
958 DwVfpRegister double_scratch = dividend;
959 __ vmul(double_scratch, divisor, quotient);
960 __ vcvt_s32_f64(double_scratch.low(), double_scratch);
961 __ vmov(scratch, double_scratch.low());
Ben Murdochb8e0da22011-05-16 14:20:40 +0100962
Steve Block44f0eee2011-05-26 01:26:41 +0100963 if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
964 __ sub(result, left, scratch);
965 } else {
966 Label ok;
967 // Check for -0.
968 __ sub(scratch2, left, scratch, SetCC);
969 __ b(ne, &ok);
970 __ cmp(left, Operand(0));
971 DeoptimizeIf(mi, instr->environment());
972 __ bind(&ok);
973 // Load the result and we are done.
974 __ mov(result, scratch2);
975 }
976
Ben Murdochb8e0da22011-05-16 14:20:40 +0100977 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100978}
979
980
981void LCodeGen::DoDivI(LDivI* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +0100982 class DeferredDivI: public LDeferredCode {
983 public:
984 DeferredDivI(LCodeGen* codegen, LDivI* instr)
985 : LDeferredCode(codegen), instr_(instr) { }
986 virtual void Generate() {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100987 codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100988 }
989 private:
990 LDivI* instr_;
991 };
992
Steve Block1e0659c2011-05-24 12:43:12 +0100993 const Register left = ToRegister(instr->InputAt(0));
994 const Register right = ToRegister(instr->InputAt(1));
Ben Murdochb8e0da22011-05-16 14:20:40 +0100995 const Register scratch = scratch0();
996 const Register result = ToRegister(instr->result());
997
998 // Check for x / 0.
999 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +01001000 __ cmp(right, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001001 DeoptimizeIf(eq, instr->environment());
1002 }
1003
1004 // Check for (0 / -x) that will produce negative zero.
1005 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1006 Label left_not_zero;
Steve Block44f0eee2011-05-26 01:26:41 +01001007 __ cmp(left, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001008 __ b(ne, &left_not_zero);
Steve Block44f0eee2011-05-26 01:26:41 +01001009 __ cmp(right, Operand(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001010 DeoptimizeIf(mi, instr->environment());
1011 __ bind(&left_not_zero);
1012 }
1013
1014 // Check for (-kMinInt / -1).
1015 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1016 Label left_not_min_int;
1017 __ cmp(left, Operand(kMinInt));
1018 __ b(ne, &left_not_min_int);
1019 __ cmp(right, Operand(-1));
1020 DeoptimizeIf(eq, instr->environment());
1021 __ bind(&left_not_min_int);
1022 }
1023
1024 Label done, deoptimize;
1025 // Test for a few common cases first.
1026 __ cmp(right, Operand(1));
1027 __ mov(result, left, LeaveCC, eq);
1028 __ b(eq, &done);
1029
1030 __ cmp(right, Operand(2));
1031 __ tst(left, Operand(1), eq);
1032 __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
1033 __ b(eq, &done);
1034
1035 __ cmp(right, Operand(4));
1036 __ tst(left, Operand(3), eq);
1037 __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
1038 __ b(eq, &done);
1039
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001040 // Call the stub. The numbers in r0 and r1 have
Ben Murdochb8e0da22011-05-16 14:20:40 +01001041 // to be tagged to Smis. If that is not possible, deoptimize.
1042 DeferredDivI* deferred = new DeferredDivI(this, instr);
1043
1044 __ TrySmiTag(left, &deoptimize, scratch);
1045 __ TrySmiTag(right, &deoptimize, scratch);
1046
1047 __ b(al, deferred->entry());
1048 __ bind(deferred->exit());
1049
1050 // If the result in r0 is a Smi, untag it, else deoptimize.
Steve Block1e0659c2011-05-24 12:43:12 +01001051 __ JumpIfNotSmi(result, &deoptimize);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001052 __ SmiUntag(result);
1053 __ b(&done);
1054
1055 __ bind(&deoptimize);
1056 DeoptimizeIf(al, instr->environment());
1057 __ bind(&done);
1058}
1059
1060
// Deferred-code helper that calls the generic BinaryOpStub for a two-input
// instruction. Shuffles the operands into the stub's calling convention
// (left in r1, right in r0) without clobbering either value, records a
// safepoint covering all registers (including doubles), and stores the
// stub's r0 result back into the safepoint slot so it survives the restore.
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  if (left.is(r1)) {
    // left already in place; only right needs to move (Move is a no-op if
    // right is already r0).
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    // Operands are exactly swapped; exchange them via r2.
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    // Move left out of r0 before overwriting r0 with right.
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  BinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoLazyDeopt);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
1090
1091
1092void LCodeGen::DoMulI(LMulI* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001093 Register scratch = scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001094 Register result = ToRegister(instr->result());
1095 // Note that result may alias left.
Steve Block1e0659c2011-05-24 12:43:12 +01001096 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001097 LOperand* right_op = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001098
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001099 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1100 bool bailout_on_minus_zero =
1101 instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001102
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001103 if (right_op->IsConstantOperand() && !can_overflow) {
1104 // Use optimized code for specific constants.
1105 int32_t constant = ToInteger32(LConstantOperand::cast(right_op));
1106
1107 if (bailout_on_minus_zero && (constant < 0)) {
1108 // The case of a null constant will be handled separately.
1109 // If constant is negative and left is null, the result should be -0.
1110 __ cmp(left, Operand(0));
1111 DeoptimizeIf(eq, instr->environment());
1112 }
1113
1114 switch (constant) {
1115 case -1:
1116 __ rsb(result, left, Operand(0));
1117 break;
1118 case 0:
1119 if (bailout_on_minus_zero) {
1120 // If left is strictly negative and the constant is null, the
1121 // result is -0. Deoptimize if required, otherwise return 0.
1122 __ cmp(left, Operand(0));
1123 DeoptimizeIf(mi, instr->environment());
1124 }
1125 __ mov(result, Operand(0));
1126 break;
1127 case 1:
1128 __ Move(result, left);
1129 break;
1130 default:
1131 // Multiplying by powers of two and powers of two plus or minus
1132 // one can be done faster with shifted operands.
1133 // For other constants we emit standard code.
1134 int32_t mask = constant >> 31;
1135 uint32_t constant_abs = (constant + mask) ^ mask;
1136
1137 if (IsPowerOf2(constant_abs) ||
1138 IsPowerOf2(constant_abs - 1) ||
1139 IsPowerOf2(constant_abs + 1)) {
1140 if (IsPowerOf2(constant_abs)) {
1141 int32_t shift = WhichPowerOf2(constant_abs);
1142 __ mov(result, Operand(left, LSL, shift));
1143 } else if (IsPowerOf2(constant_abs - 1)) {
1144 int32_t shift = WhichPowerOf2(constant_abs - 1);
1145 __ add(result, left, Operand(left, LSL, shift));
1146 } else if (IsPowerOf2(constant_abs + 1)) {
1147 int32_t shift = WhichPowerOf2(constant_abs + 1);
1148 __ rsb(result, left, Operand(left, LSL, shift));
1149 }
1150
1151 // Correct the sign of the result is the constant is negative.
1152 if (constant < 0) __ rsb(result, result, Operand(0));
1153
1154 } else {
1155 // Generate standard code.
1156 __ mov(ip, Operand(constant));
1157 __ mul(result, left, ip);
1158 }
1159 }
1160
Ben Murdochb0fe1622011-05-05 13:52:32 +01001161 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001162 Register right = EmitLoadRegister(right_op, scratch);
1163 if (bailout_on_minus_zero) {
1164 __ orr(ToRegister(instr->TempAt(0)), left, right);
1165 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001166
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001167 if (can_overflow) {
1168 // scratch:result = left * right.
1169 __ smull(result, scratch, left, right);
1170 __ cmp(scratch, Operand(result, ASR, 31));
1171 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001172 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001173 __ mul(result, left, right);
1174 }
1175
1176 if (bailout_on_minus_zero) {
1177 // Bail out if the result is supposed to be negative zero.
1178 Label done;
1179 __ cmp(result, Operand(0));
1180 __ b(ne, &done);
Steve Block1e0659c2011-05-24 12:43:12 +01001181 __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001182 DeoptimizeIf(mi, instr->environment());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001183 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001184 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001185 }
1186}
1187
1188
1189void LCodeGen::DoBitI(LBitI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001190 LOperand* left_op = instr->InputAt(0);
1191 LOperand* right_op = instr->InputAt(1);
1192 ASSERT(left_op->IsRegister());
1193 Register left = ToRegister(left_op);
1194 Register result = ToRegister(instr->result());
1195 Operand right(no_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01001196
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001197 if (right_op->IsStackSlot() || right_op->IsArgument()) {
1198 right = Operand(EmitLoadRegister(right_op, ip));
Steve Block44f0eee2011-05-26 01:26:41 +01001199 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001200 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
1201 right = ToOperand(right_op);
Steve Block44f0eee2011-05-26 01:26:41 +01001202 }
1203
Ben Murdochb0fe1622011-05-05 13:52:32 +01001204 switch (instr->op()) {
1205 case Token::BIT_AND:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001206 __ and_(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001207 break;
1208 case Token::BIT_OR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001209 __ orr(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001210 break;
1211 case Token::BIT_XOR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001212 __ eor(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001213 break;
1214 default:
1215 UNREACHABLE();
1216 break;
1217 }
1218}
1219
1220
1221void LCodeGen::DoShiftI(LShiftI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001222 // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
1223 // result may alias either of them.
1224 LOperand* right_op = instr->InputAt(1);
1225 Register left = ToRegister(instr->InputAt(0));
1226 Register result = ToRegister(instr->result());
Steve Block9fac8402011-05-12 15:51:54 +01001227 Register scratch = scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001228 if (right_op->IsRegister()) {
1229 // Mask the right_op operand.
1230 __ and_(scratch, ToRegister(right_op), Operand(0x1F));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001231 switch (instr->op()) {
1232 case Token::SAR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001233 __ mov(result, Operand(left, ASR, scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001234 break;
1235 case Token::SHR:
1236 if (instr->can_deopt()) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001237 __ mov(result, Operand(left, LSR, scratch), SetCC);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001238 DeoptimizeIf(mi, instr->environment());
1239 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001240 __ mov(result, Operand(left, LSR, scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001241 }
1242 break;
1243 case Token::SHL:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001244 __ mov(result, Operand(left, LSL, scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001245 break;
1246 default:
1247 UNREACHABLE();
1248 break;
1249 }
1250 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001251 // Mask the right_op operand.
1252 int value = ToInteger32(LConstantOperand::cast(right_op));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001253 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1254 switch (instr->op()) {
1255 case Token::SAR:
1256 if (shift_count != 0) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001257 __ mov(result, Operand(left, ASR, shift_count));
1258 } else {
1259 __ Move(result, left);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001260 }
1261 break;
1262 case Token::SHR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001263 if (shift_count != 0) {
1264 __ mov(result, Operand(left, LSR, shift_count));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001265 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001266 if (instr->can_deopt()) {
1267 __ tst(left, Operand(0x80000000));
1268 DeoptimizeIf(ne, instr->environment());
1269 }
1270 __ Move(result, left);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001271 }
1272 break;
1273 case Token::SHL:
1274 if (shift_count != 0) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001275 __ mov(result, Operand(left, LSL, shift_count));
1276 } else {
1277 __ Move(result, left);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001278 }
1279 break;
1280 default:
1281 UNREACHABLE();
1282 break;
1283 }
1284 }
1285}
1286
1287
1288void LCodeGen::DoSubI(LSubI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01001289 LOperand* left = instr->InputAt(0);
1290 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001291 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001292 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1293 SBit set_cond = can_overflow ? SetCC : LeaveCC;
1294
1295 if (right->IsStackSlot() || right->IsArgument()) {
1296 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001297 __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001298 } else {
1299 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001300 __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001301 }
1302
1303 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001304 DeoptimizeIf(vs, instr->environment());
1305 }
1306}
1307
1308
// Materializes an int32 constant into the result register.
void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}
1313
1314
1315void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001316 ASSERT(instr->result()->IsDoubleRegister());
1317 DwVfpRegister result = ToDoubleRegister(instr->result());
1318 double v = instr->value();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001319 __ Vmov(result, v);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001320}
1321
1322
// Materializes a tagged (heap object or Smi) constant into the result
// register.
void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}
1327
1328
// Loads the 'length' field of a JSArray into the result register.
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001334
Ben Murdochb0fe1622011-05-05 13:52:32 +01001335
// Loads the 'length' field of a FixedArrayBase (e.g. an elements backing
// store) into the result register.
void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}
1341
1342
// Extracts the elements kind of a heap object from its map's bit field 2
// into the result register.
void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ ldr(result, FieldMemOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
}
1355
1356
// Implements the ValueOf operation: Smis and non-JSValue objects are
// returned unchanged, while JSValue wrappers yield their wrapped value.
// Uses ARM conditional moves keyed off the preceding comparisons.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ Move(result, input, eq);
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ Move(result, input, ne);
  __ b(ne, &done);
  // Otherwise load the wrapped value out of the JSValue.
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1376
1377
1378void LCodeGen::DoBitNotI(LBitNotI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001379 Register input = ToRegister(instr->InputAt(0));
1380 Register result = ToRegister(instr->result());
1381 __ mvn(result, Operand(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001382}
1383
1384
1385void LCodeGen::DoThrow(LThrow* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001386 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001387 __ push(input_reg);
1388 CallRuntime(Runtime::kThrow, 1, instr);
1389
1390 if (FLAG_debug_code) {
1391 __ stop("Unreachable code.");
1392 }
1393}
1394
1395
1396void LCodeGen::DoAddI(LAddI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001397 LOperand* left = instr->InputAt(0);
1398 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001399 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001400 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1401 SBit set_cond = can_overflow ? SetCC : LeaveCC;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001402
Steve Block44f0eee2011-05-26 01:26:41 +01001403 if (right->IsStackSlot() || right->IsArgument()) {
1404 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001405 __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001406 } else {
1407 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001408 __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001409 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001410
Steve Block44f0eee2011-05-26 01:26:41 +01001411 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001412 DeoptimizeIf(vs, instr->environment());
1413 }
1414}
1415
1416
1417void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001418 DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
1419 DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001420 DoubleRegister result = ToDoubleRegister(instr->result());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001421 switch (instr->op()) {
1422 case Token::ADD:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001423 __ vadd(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001424 break;
1425 case Token::SUB:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001426 __ vsub(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001427 break;
1428 case Token::MUL:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001429 __ vmul(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001430 break;
1431 case Token::DIV:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001432 __ vdiv(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001433 break;
1434 case Token::MOD: {
Steve Block1e0659c2011-05-24 12:43:12 +01001435 // Save r0-r3 on the stack.
1436 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
1437
Ben Murdoch257744e2011-11-30 15:57:28 +00001438 __ PrepareCallCFunction(0, 2, scratch0());
1439 __ SetCallCDoubleArguments(left, right);
Steve Block44f0eee2011-05-26 01:26:41 +01001440 __ CallCFunction(
Ben Murdoch257744e2011-11-30 15:57:28 +00001441 ExternalReference::double_fp_operation(Token::MOD, isolate()),
1442 0, 2);
Steve Block1e0659c2011-05-24 12:43:12 +01001443 // Move the result in the double result register.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001444 __ GetCFunctionDoubleResult(result);
Steve Block1e0659c2011-05-24 12:43:12 +01001445
1446 // Restore r0-r3.
1447 __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001448 break;
1449 }
1450 default:
1451 UNREACHABLE();
1452 break;
1453 }
1454}
1455
1456
1457void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001458 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
1459 ASSERT(ToRegister(instr->InputAt(1)).is(r0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001460 ASSERT(ToRegister(instr->result()).is(r0));
1461
Ben Murdoch257744e2011-11-30 15:57:28 +00001462 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001463 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdoch18a6f572011-07-25 17:16:09 +01001464 __ nop(); // Signals no inlined code.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001465}
1466
1467
1468int LCodeGen::GetNextEmittedBlock(int block) {
1469 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1470 LLabel* label = chunk_->GetLabel(i);
1471 if (!label->HasReplacement()) return i;
1472 }
1473 return -1;
1474}
1475
1476
1477void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
1478 int next_block = GetNextEmittedBlock(current_block_);
1479 right_block = chunk_->LookupDestination(right_block);
1480 left_block = chunk_->LookupDestination(left_block);
1481
1482 if (right_block == left_block) {
1483 EmitGoto(left_block);
1484 } else if (left_block == next_block) {
1485 __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
1486 } else if (right_block == next_block) {
1487 __ b(cc, chunk_->GetAssemblyLabel(left_block));
1488 } else {
1489 __ b(cc, chunk_->GetAssemblyLabel(left_block));
1490 __ b(chunk_->GetAssemblyLabel(right_block));
1491 }
1492}
1493
1494
// Lowers LBranch: evaluates the generic ToBoolean of the input and branches
// to the true/false blocks. Specializes on the known representation/type of
// the value; for generic tagged values it emits inline checks only for the
// types recorded in the hydrogen instruction's expected_input_types(), and
// deopts if an unseen type shows up at runtime.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    // int32: non-zero -> true.
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    // Z is set for equality with 0.0, V for an unordered (NaN) compare, so
    // "neither flag set" means a truthy number.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      // Known boolean: only need to distinguish true from false.
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, eq);
    } else if (type.IsSmi()) {
      // Known smi: smi zero is the only falsy smi.
      __ cmp(reg, Operand(0));
      EmitBranch(true_block, false_block, ne);
    } else {
      // Generic tagged value: emit checks for the observed input types only.
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
        __ b(eq, false_label);
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // Boolean -> its value.
        __ CompareRoot(reg, Heap::kTrueValueRootIndex);
        __ b(eq, true_label);
        __ CompareRoot(reg, Heap::kFalseValueRootIndex);
        __ b(eq, false_label);
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ CompareRoot(reg, Heap::kNullValueRootIndex);
        __ b(eq, false_label);
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ cmp(reg, Operand(0));
        __ b(eq, false_label);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ tst(reg, Operand(kSmiTagMask));
        DeoptimizeIf(eq, instr->environment());
      }

      const Register map = scratch0();
      if (expected.NeedsMap()) {
        // Value is known to be a heap object here (smi handled above).
        __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));

        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
          __ tst(ip, Operand(1 << Map::kIsUndetectable));
          __ b(ne, false_label);
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // spec object -> true.
        __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
        __ b(ge, true_label);
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
        __ b(ge, &not_string);
        __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
        __ cmp(ip, Operand(0));
        __ b(ne, true_label);
        __ b(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        DoubleRegister dbl_scratch = double_scratch0();
        Label not_heap_number;
        __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
        __ b(ne, &not_heap_number);
        __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
        __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
        __ b(vs, false_label);  // NaN -> false.
        __ b(eq, false_label);  // +0, -0 -> false.
        __ b(true_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(al, instr->environment());
    }
  }
}
1608
1609
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001610void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001611 block = chunk_->LookupDestination(block);
1612 int next_block = GetNextEmittedBlock(current_block_);
1613 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001614 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001615 }
1616}
1617
1618
Ben Murdochb0fe1622011-05-05 13:52:32 +01001619void LCodeGen::DoGoto(LGoto* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001620 EmitGoto(instr->block_id());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001621}
1622
1623
1624Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001625 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001626 switch (op) {
1627 case Token::EQ:
1628 case Token::EQ_STRICT:
1629 cond = eq;
1630 break;
1631 case Token::LT:
1632 cond = is_unsigned ? lo : lt;
1633 break;
1634 case Token::GT:
1635 cond = is_unsigned ? hi : gt;
1636 break;
1637 case Token::LTE:
1638 cond = is_unsigned ? ls : le;
1639 break;
1640 case Token::GTE:
1641 cond = is_unsigned ? hs : ge;
1642 break;
1643 case Token::IN:
1644 case Token::INSTANCEOF:
1645 default:
1646 UNREACHABLE();
1647 }
1648 return cond;
1649}
1650
1651
// Emits an integer register-register compare; the result is left in the
// condition flags for a following conditional branch.
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}
1655
1656
Ben Murdochb0fe1622011-05-05 13:52:32 +01001657void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001658 LOperand* left = instr->InputAt(0);
1659 LOperand* right = instr->InputAt(1);
1660 int false_block = chunk_->LookupDestination(instr->false_block_id());
1661 int true_block = chunk_->LookupDestination(instr->true_block_id());
1662
1663 if (instr->is_double()) {
1664 // Compare left and right as doubles and load the
1665 // resulting flags into the normal status register.
1666 __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
1667 // If a NaN is involved, i.e. the result is unordered (V set),
1668 // jump to false block label.
1669 __ b(vs, chunk_->GetAssemblyLabel(false_block));
1670 } else {
1671 EmitCmpI(left, right);
1672 }
1673
1674 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1675 EmitBranch(true_block, false_block, cc);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001676}
1677
1678
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001679void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001680 Register left = ToRegister(instr->InputAt(0));
1681 Register right = ToRegister(instr->InputAt(1));
1682 int false_block = chunk_->LookupDestination(instr->false_block_id());
1683 int true_block = chunk_->LookupDestination(instr->true_block_id());
1684
1685 __ cmp(left, Operand(right));
1686 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001687}
1688
1689
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001690void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001691 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00001692 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001693 int false_block = chunk_->LookupDestination(instr->false_block_id());
Ben Murdoch257744e2011-11-30 15:57:28 +00001694
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001695 __ cmp(left, Operand(instr->hydrogen()->right()));
Ben Murdoch257744e2011-11-30 15:57:28 +00001696 EmitBranch(true_block, false_block, eq);
1697}
1698
1699
Ben Murdochb0fe1622011-05-05 13:52:32 +01001700void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001701 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001702 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001703
1704 // TODO(fsc): If the expression is known to be a smi, then it's
1705 // definitely not null. Jump to the false block.
1706
1707 int true_block = chunk_->LookupDestination(instr->true_block_id());
1708 int false_block = chunk_->LookupDestination(instr->false_block_id());
1709
1710 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1711 __ cmp(reg, ip);
1712 if (instr->is_strict()) {
1713 EmitBranch(true_block, false_block, eq);
1714 } else {
1715 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1716 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1717 __ b(eq, true_label);
1718 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1719 __ cmp(reg, ip);
1720 __ b(eq, true_label);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001721 __ JumpIfSmi(reg, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001722 // Check for undetectable objects by looking in the bit field in
1723 // the map. The object has already been smi checked.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001724 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1725 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1726 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1727 EmitBranch(true_block, false_block, ne);
1728 }
1729}
1730
1731
// Emits the "is object" test used by the IsObject branch: jumps to
// |is_not_object| or |is_object| for the decided cases, and otherwise falls
// through with the flags set so that the returned condition (le) holds iff
// the input is a non-callable spec object. Clobbers temp1 and scratch0().
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Label* is_not_object,
                                 Label* is_object) {
  Register temp2 = scratch0();
  // Smis are not objects.
  __ JumpIfSmi(input, is_not_object);

  // null counts as an object for this test.
  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
  __ cmp(input, temp2);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  return le;
}
1757
1758
Ben Murdochb0fe1622011-05-05 13:52:32 +01001759void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001760 Register reg = ToRegister(instr->InputAt(0));
1761 Register temp1 = ToRegister(instr->TempAt(0));
Steve Block1e0659c2011-05-24 12:43:12 +01001762
1763 int true_block = chunk_->LookupDestination(instr->true_block_id());
1764 int false_block = chunk_->LookupDestination(instr->false_block_id());
1765 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1766 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1767
1768 Condition true_cond =
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001769 EmitIsObject(reg, temp1, false_label, true_label);
Steve Block1e0659c2011-05-24 12:43:12 +01001770
1771 EmitBranch(true_block, false_block, true_cond);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001772}
1773
1774
Ben Murdochb0fe1622011-05-05 13:52:32 +01001775void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1776 int true_block = chunk_->LookupDestination(instr->true_block_id());
1777 int false_block = chunk_->LookupDestination(instr->false_block_id());
1778
Steve Block1e0659c2011-05-24 12:43:12 +01001779 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001780 __ tst(input_reg, Operand(kSmiTagMask));
1781 EmitBranch(true_block, false_block, eq);
1782}
1783
1784
Ben Murdoch257744e2011-11-30 15:57:28 +00001785void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1786 Register input = ToRegister(instr->InputAt(0));
1787 Register temp = ToRegister(instr->TempAt(0));
1788
1789 int true_block = chunk_->LookupDestination(instr->true_block_id());
1790 int false_block = chunk_->LookupDestination(instr->false_block_id());
1791
1792 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
1793 __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
1794 __ ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
1795 __ tst(temp, Operand(1 << Map::kIsUndetectable));
1796 EmitBranch(true_block, false_block, ne);
1797}
1798
1799
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001800static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001801 InstanceType from = instr->from();
1802 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001803 if (from == FIRST_TYPE) return to;
1804 ASSERT(from == to || to == LAST_TYPE);
1805 return from;
1806}
1807
1808
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001809static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001810 InstanceType from = instr->from();
1811 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001812 if (from == to) return eq;
1813 if (to == LAST_TYPE) return hs;
1814 if (from == FIRST_TYPE) return ls;
1815 UNREACHABLE();
1816 return eq;
1817}
1818
1819
Ben Murdochb0fe1622011-05-05 13:52:32 +01001820void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001821 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001822 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001823
1824 int true_block = chunk_->LookupDestination(instr->true_block_id());
1825 int false_block = chunk_->LookupDestination(instr->false_block_id());
1826
1827 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1828
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001829 __ JumpIfSmi(input, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001830
Steve Block1e0659c2011-05-24 12:43:12 +01001831 __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
1832 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001833}
1834
1835
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001836void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1837 Register input = ToRegister(instr->InputAt(0));
1838 Register result = ToRegister(instr->result());
1839
1840 if (FLAG_debug_code) {
1841 __ AbortIfNotString(input);
1842 }
1843
1844 __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
1845 __ IndexFromHash(result, result);
1846}
1847
1848
Ben Murdochb0fe1622011-05-05 13:52:32 +01001849void LCodeGen::DoHasCachedArrayIndexAndBranch(
1850 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001851 Register input = ToRegister(instr->InputAt(0));
1852 Register scratch = scratch0();
1853
1854 int true_block = chunk_->LookupDestination(instr->true_block_id());
1855 int false_block = chunk_->LookupDestination(instr->false_block_id());
1856
1857 __ ldr(scratch,
1858 FieldMemOperand(input, String::kHashFieldOffset));
1859 __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
1860 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001861}
1862
1863
Ben Murdochb8e0da22011-05-16 14:20:40 +01001864// Branches to a label or falls through with the answer in flags. Trashes
Ben Murdochb0fe1622011-05-05 13:52:32 +01001865// the temp registers, but not the input. Only input and temp2 may alias.
1866void LCodeGen::EmitClassOfTest(Label* is_true,
1867 Label* is_false,
1868 Handle<String>class_name,
1869 Register input,
1870 Register temp,
1871 Register temp2) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001872 ASSERT(!input.is(temp));
1873 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001874 __ JumpIfSmi(input, is_false);
1875 __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001876 __ b(lt, is_false);
1877
1878 // Map is now in temp.
1879 // Functions have class 'Function'.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001880 __ CompareInstanceType(temp, temp2, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001881 if (class_name->IsEqualTo(CStrVector("Function"))) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001882 __ b(ge, is_true);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001883 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001884 __ b(ge, is_false);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001885 }
1886
1887 // Check if the constructor in the map is a function.
1888 __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
1889
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001890 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type and
1891 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1892 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1893 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1894 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1895 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001896
1897 // Objects with a non-function constructor have class 'Object'.
1898 __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
1899 if (class_name->IsEqualTo(CStrVector("Object"))) {
1900 __ b(ne, is_true);
1901 } else {
1902 __ b(ne, is_false);
1903 }
1904
1905 // temp now contains the constructor function. Grab the
1906 // instance class name from there.
1907 __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1908 __ ldr(temp, FieldMemOperand(temp,
1909 SharedFunctionInfo::kInstanceClassNameOffset));
1910 // The class name we are testing against is a symbol because it's a literal.
1911 // The name in the constructor is a symbol because of the way the context is
1912 // booted. This routine isn't expected to work for random API-created
1913 // classes and it doesn't have to because you can't access it with natives
1914 // syntax. Since both sides are symbols it is sufficient to use an identity
1915 // comparison.
1916 __ cmp(temp, Operand(class_name));
1917 // End with the answer in flags.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001918}
1919
1920
Ben Murdochb0fe1622011-05-05 13:52:32 +01001921void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001922 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001923 Register temp = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001924 Register temp2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001925 Handle<String> class_name = instr->hydrogen()->class_name();
1926
1927 int true_block = chunk_->LookupDestination(instr->true_block_id());
1928 int false_block = chunk_->LookupDestination(instr->false_block_id());
1929
1930 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1931 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1932
1933 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1934
1935 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001936}
1937
1938
1939void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001940 Register reg = ToRegister(instr->InputAt(0));
1941 Register temp = ToRegister(instr->TempAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001942 int true_block = instr->true_block_id();
1943 int false_block = instr->false_block_id();
1944
1945 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
1946 __ cmp(temp, Operand(instr->map()));
1947 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001948}
1949
1950
// Lowers LInstanceOf via the generic InstanceofStub. The stub returns zero
// in r0 on a positive result, so the boolean is materialized with a pair of
// conditional moves.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  // Stub result: 0 means "is an instance".
  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
1962
1963
Ben Murdoch086aeea2011-05-13 15:57:08 +01001964void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001965 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1966 public:
1967 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1968 LInstanceOfKnownGlobal* instr)
1969 : LDeferredCode(codegen), instr_(instr) { }
1970 virtual void Generate() {
Ben Murdoch2b4ba112012-01-20 14:57:15 +00001971 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
Steve Block1e0659c2011-05-24 12:43:12 +01001972 }
1973
1974 Label* map_check() { return &map_check_; }
1975
1976 private:
1977 LInstanceOfKnownGlobal* instr_;
1978 Label map_check_;
1979 };
1980
1981 DeferredInstanceOfKnownGlobal* deferred;
1982 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1983
1984 Label done, false_result;
1985 Register object = ToRegister(instr->InputAt(0));
1986 Register temp = ToRegister(instr->TempAt(0));
1987 Register result = ToRegister(instr->result());
1988
1989 ASSERT(object.is(r0));
1990 ASSERT(result.is(r0));
1991
1992 // A Smi is not instance of anything.
1993 __ JumpIfSmi(object, &false_result);
1994
1995 // This is the inlined call site instanceof cache. The two occurences of the
1996 // hole value will be patched to the last map/result pair generated by the
1997 // instanceof stub.
1998 Label cache_miss;
1999 Register map = temp;
2000 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
2001 __ bind(deferred->map_check()); // Label for calculating code patching.
2002 // We use Factory::the_hole_value() on purpose instead of loading from the
2003 // root array to force relocation to be able to later patch with
2004 // the cached map.
Steve Block44f0eee2011-05-26 01:26:41 +01002005 __ mov(ip, Operand(factory()->the_hole_value()));
Steve Block1e0659c2011-05-24 12:43:12 +01002006 __ cmp(map, Operand(ip));
2007 __ b(ne, &cache_miss);
2008 // We use Factory::the_hole_value() on purpose instead of loading from the
2009 // root array to force relocation to be able to later patch
2010 // with true or false.
Steve Block44f0eee2011-05-26 01:26:41 +01002011 __ mov(result, Operand(factory()->the_hole_value()));
Steve Block1e0659c2011-05-24 12:43:12 +01002012 __ b(&done);
2013
2014 // The inlined call site cache did not match. Check null and string before
2015 // calling the deferred code.
2016 __ bind(&cache_miss);
2017 // Null is not instance of anything.
2018 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2019 __ cmp(object, Operand(ip));
2020 __ b(eq, &false_result);
2021
2022 // String values is not instance of anything.
2023 Condition is_string = masm_->IsObjectStringType(object, temp);
2024 __ b(is_string, &false_result);
2025
2026 // Go to the deferred code.
2027 __ b(deferred->entry());
2028
2029 __ bind(&false_result);
2030 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2031
2032 // Here result has either true or false. Deferred code also produces true or
2033 // false object.
2034 __ bind(deferred->exit());
2035 __ bind(&done);
2036}
2037
2038
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002039void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2040 Label* map_check) {
Steve Block1e0659c2011-05-24 12:43:12 +01002041 Register result = ToRegister(instr->result());
2042 ASSERT(result.is(r0));
2043
2044 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2045 flags = static_cast<InstanceofStub::Flags>(
2046 flags | InstanceofStub::kArgsInRegisters);
2047 flags = static_cast<InstanceofStub::Flags>(
2048 flags | InstanceofStub::kCallSiteInlineCheck);
2049 flags = static_cast<InstanceofStub::Flags>(
2050 flags | InstanceofStub::kReturnTrueFalseObject);
2051 InstanceofStub stub(flags);
2052
Ben Murdoch8b112d22011-06-08 16:22:53 +01002053 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01002054
2055 // Get the temp register reserved by the instruction. This needs to be r4 as
2056 // its slot of the pushing of safepoint registers is used to communicate the
2057 // offset to the location of the map check.
2058 Register temp = ToRegister(instr->TempAt(0));
2059 ASSERT(temp.is(r4));
2060 __ mov(InstanceofStub::right(), Operand(instr->function()));
2061 static const int kAdditionalDelta = 4;
2062 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2063 Label before_push_delta;
2064 __ bind(&before_push_delta);
2065 __ BlockConstPoolFor(kAdditionalDelta);
2066 __ mov(temp, Operand(delta * kPointerSize));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002067 __ StoreToSafepointRegisterSlot(temp, temp);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002068 CallCodeGeneric(stub.GetCode(),
2069 RelocInfo::CODE_TARGET,
2070 instr,
2071 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002072 ASSERT(instr->HasDeoptimizationEnvironment());
2073 LEnvironment* env = instr->deoptimization_environment();
2074 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
Steve Block1e0659c2011-05-24 12:43:12 +01002075 // Put the result value into the result register slot and
2076 // restore all registers.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002077 __ StoreToSafepointRegisterSlot(result, result);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002078}
2079
Ben Murdochb0fe1622011-05-05 13:52:32 +01002080
2081static Condition ComputeCompareCondition(Token::Value op) {
2082 switch (op) {
2083 case Token::EQ_STRICT:
2084 case Token::EQ:
2085 return eq;
2086 case Token::LT:
2087 return lt;
2088 case Token::GT:
2089 return gt;
2090 case Token::LTE:
2091 return le;
2092 case Token::GTE:
2093 return ge;
2094 default:
2095 UNREACHABLE();
Steve Block1e0659c2011-05-24 12:43:12 +01002096 return kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002097 }
2098}
2099
2100
// Lowers LCmpT: generic (tagged) comparison through the CompareIC. The IC
// leaves a value in r0 whose sign/zeroness encodes the comparison result;
// the boolean is materialized with a pair of conditional LoadRoots.
void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  // GT and LTE are computed by the IC with swapped arguments, so the
  // condition must be reversed here.
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}
2119
2120
Ben Murdochb0fe1622011-05-05 13:52:32 +01002121void LCodeGen::DoReturn(LReturn* instr) {
2122 if (FLAG_trace) {
2123 // Push the return value on the stack as the parameter.
2124 // Runtime::TraceExit returns its parameter in r0.
2125 __ push(r0);
2126 __ CallRuntime(Runtime::kTraceExit, 1);
2127 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002128 int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002129 __ mov(sp, fp);
2130 __ ldm(ia_w, sp, fp.bit() | lr.bit());
2131 __ add(sp, sp, Operand(sp_delta));
2132 __ Jump(lr);
2133}
2134
2135
Ben Murdoch8b112d22011-06-08 16:22:53 +01002136void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002137 Register result = ToRegister(instr->result());
2138 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
2139 __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
2140 if (instr->hydrogen()->check_hole_value()) {
2141 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2142 __ cmp(result, ip);
2143 DeoptimizeIf(eq, instr->environment());
2144 }
2145}
2146
2147
Ben Murdoch8b112d22011-06-08 16:22:53 +01002148void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2149 ASSERT(ToRegister(instr->global_object()).is(r0));
2150 ASSERT(ToRegister(instr->result()).is(r0));
2151
2152 __ mov(r2, Operand(instr->name()));
2153 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2154 : RelocInfo::CODE_TARGET_CONTEXT;
2155 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2156 CallCode(ic, mode, instr);
2157}
2158
2159
// Lowers LStoreGlobalCell: stores a value into a global property cell,
// deopting first if the cell holds the hole (property was deleted). No write
// barrier is needed: the cell is never in new space and is not scanned
// through the object's map here — assumption from the original code, confirm
// against the hydrogen instruction's guarantees if modifying.
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
2183
2184
Ben Murdoch8b112d22011-06-08 16:22:53 +01002185void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2186 ASSERT(ToRegister(instr->global_object()).is(r1));
2187 ASSERT(ToRegister(instr->value()).is(r0));
2188
2189 __ mov(r2, Operand(instr->name()));
2190 Handle<Code> ic = instr->strict_mode()
2191 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2192 : isolate()->builtins()->StoreIC_Initialize();
2193 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2194}
2195
2196
Ben Murdochb8e0da22011-05-16 14:20:40 +01002197void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002198 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002199 Register result = ToRegister(instr->result());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002200 __ ldr(result, ContextOperand(context, instr->slot_index()));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002201}
2202
2203
Steve Block1e0659c2011-05-24 12:43:12 +01002204void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2205 Register context = ToRegister(instr->context());
2206 Register value = ToRegister(instr->value());
Steve Block1e0659c2011-05-24 12:43:12 +01002207 __ str(value, ContextOperand(context, instr->slot_index()));
2208 if (instr->needs_write_barrier()) {
2209 int offset = Context::SlotOffset(instr->slot_index());
2210 __ RecordWrite(context, Operand(offset), value, scratch0());
2211 }
2212}
2213
2214
Ben Murdochb0fe1622011-05-05 13:52:32 +01002215void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002216 Register object = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01002217 Register result = ToRegister(instr->result());
2218 if (instr->hydrogen()->is_in_object()) {
2219 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
2220 } else {
2221 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2222 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
2223 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002224}
2225
2226
// Emits the load of property |name| from |object|, assuming |object| has map
// |type|. The descriptor lookup happens at compile time; the property must be
// either a plain FIELD or a CONSTANT_FUNCTION (asserted below).
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
      __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    // Constant function: the value is known at compile time, so just
    // materialize the heap object in the result register.
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    LoadHeapObject(result, Handle<HeapObject>::cast(function));
  }
}
2252
2253
// Polymorphic named load: compares the receiver's map against each map seen
// by type feedback and emits a direct field/constant load per map. Falls
// back to the generic LoadIC (if needed) or deoptimizes on an unknown map.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps recorded: only the generic path is possible.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    // Dispatch on all but the last map with compare-and-branch.
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    // The last map either falls through to the generic IC or deoptimizes,
    // depending on whether a generic fallback is required.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
2295
2296
// Generic named property load through the LoadIC stub.
// Register contract: r0 = receiver (and result), r2 = property name.
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
2306
2307
// Loads the prototype of a JSFunction, deoptimizing if the input is not a
// function or if it lacks a prototype/initial map (the-hole sentinel).
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2350
2351
// Loads the elements backing store of a JSObject. In debug builds, verifies
// that the elements are a FixedArray, a copy-on-write FixedArray, a
// FAST_ELEMENTS-kind array, or an external array.
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, fail;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    // |scratch| still holds the map of the elements object (loaded above).
    // Extract its elements kind and accept fast or external kinds.
    __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
    __ ubfx(scratch, scratch, Map::kElementsKindShift,
            Map::kElementsKindBitCount);
    __ cmp(scratch, Operand(FAST_ELEMENTS));
    __ b(eq, &done);
    // Accept anything in the external-array kind range.
    __ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ b(lt, &fail);
    __ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ b(le, &done);
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed.");
    __ bind(&done);
  }
}
2382
2383
// Loads the raw backing-store pointer out of an ExternalArray object.
void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ ldr(to_reg, FieldMemOperand(from_reg,
                                 ExternalArray::kExternalPointerOffset));
}
2391
2392
// Loads argument number |index| from an arguments frame described by
// (arguments pointer, length). Deoptimizes on an out-of-range index.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bailout index is not a valid argument index. Use unsigned check to get
  // negative check for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them; add one more.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
2409
2410
// Loads an element from a fast-elements FixedArray at a register key,
// optionally deoptimizing when the loaded value is the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ cmp(result, scratch);
    DeoptimizeIf(eq, instr->environment());
  }
}
2428
2429
// Loads a double element from a FixedDoubleArray into a VFP register.
// If a hole check is required, the upper 32 bits are compared against the
// hole-NaN pattern before the full double is loaded.
void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  Register elements = ToRegister(instr->elements());
  bool key_is_constant = instr->key()->IsConstantOperand();
  Register key = no_reg;
  DwVfpRegister result = ToDoubleRegister(instr->result());
  Register scratch = scratch0();

  int shift_size =
      ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Guard against indices too large to encode in the addressing below.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }

  // Fold the header offset into the constant-key operand; for register keys
  // it is added separately below. Note: |elements| is clobbered here.
  Operand operand = key_is_constant
      ? Operand(constant_key * (1 << shift_size) +
                FixedDoubleArray::kHeaderSize - kHeapObjectTag)
      : Operand(key, LSL, shift_size);
  __ add(elements, elements, operand);
  if (!key_is_constant) {
    __ add(elements, elements,
           Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  }

  if (instr->hydrogen()->RequiresHoleCheck()) {
    // TODO(danno): If no hole check is required, there is no need to allocate
    // elements into a temporary register, instead scratch can be used.
    __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
    __ cmp(scratch, Operand(kHoleNanUpper32));
    DeoptimizeIf(eq, instr->environment());
  }

  __ vldr(result, elements, 0);
}
2470
2471
// Loads an element from an external (typed) array. Float/double kinds go
// through VFP registers; integer kinds use width/sign-appropriate loads.
// uint32 values >= 2^31 cannot be represented as int32 and deoptimize.
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    // Guard against indices too large to encode in the addressing below.
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  // Element size in bytes, expressed as a shift amount.
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result = ToDoubleRegister(instr->result());
    Operand operand = key_is_constant
        ? Operand(constant_key * (1 << shift_size))
        : Operand(key, LSL, shift_size);
    __ add(scratch0(), external_pointer, operand);
    if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
      // Load the 32-bit float and widen it to double.
      __ vldr(result.low(), scratch0(), 0);
      __ vcvt_f64_f32(result, result.low());
    } else  {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
      __ vldr(result, scratch0(), 0);
    }
  } else {
    Register result = ToRegister(instr->result());
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (elements_kind) {
      case EXTERNAL_BYTE_ELEMENTS:
        __ ldrsb(result, mem_operand);
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ ldrb(result, mem_operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
        __ ldrsh(result, mem_operand);
        break;
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ ldrh(result, mem_operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
        __ ldr(result, mem_operand);
        break;
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ ldr(result, mem_operand);
        // A uint32 with the high bit set does not fit in a tagged int32.
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        // Non-integer kinds are handled above or by other instructions.
        UNREACHABLE();
        break;
    }
  }
}
2544
2545
// Generic keyed load through the KeyedLoadIC stub.
// Register contract: r1 = receiver, r0 = key.
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
2553
2554
2555void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002556 Register scratch = scratch0();
2557 Register result = ToRegister(instr->result());
2558
2559 // Check if the calling frame is an arguments adaptor frame.
2560 Label done, adapted;
2561 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2562 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
2563 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2564
2565 // Result is the frame pointer for the frame if not adapted and for the real
2566 // frame below the adaptor frame if adapted.
2567 __ mov(result, fp, LeaveCC, ne);
2568 __ mov(result, scratch, LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002569}
2570
2571
// Computes the number of actual arguments: the static parameter count when
// there is no adaptor frame, otherwise the (untagged) length stored in the
// arguments adaptor frame.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  // The stored length is a Smi; convert it to a plain integer.
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}
2592
2593
// Implements Function.prototype.apply with an arguments object: normalizes
// the receiver (global object for null/undefined in non-strict, non-builtin
// functions), pushes up to kArgumentsLimit arguments from the arguments
// frame, then invokes the function.
// Register contract: r0 = receiver/result, r1 = function.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions.
  __ ldr(scratch,
         FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch,
         FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
  __ tst(scratch,
         Operand(1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Do not transform the receiver to object for builtins.
  __ tst(scratch, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
  __ b(ne, &receiver_ok);

  // Normal function. Replace undefined or null with global receiver.
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object.
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
  DeoptimizeIf(lt, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ ldr(receiver,
         FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2682
2683
// Pushes a single (non-double) argument onto the stack for an upcoming call.
void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    // Double arguments are not supported by this instruction on ARM.
    Abort("DoPushArgument not implemented for double type.");
  } else {
    Register argument_reg = EmitLoadRegister(argument, ip);
    __ push(argument_reg);
  }
}
2693
2694
// Loads the currently executing JSFunction from the frame's function slot.
void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
2699
2700
// Materializes the current context (the cp register) into the result.
void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, cp);
}
2705
2706
// Loads the enclosing (previous) context from the given context's
// PREVIOUS_INDEX slot.
void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result,
         MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
}
2713
2714
// Loads the global object from the current context.
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}
2719
2720
// Loads the global receiver object out of the given global object.
void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}
2726
2727
// Emits a direct call to a statically known JSFunction: switches context if
// the callee's context differs (or the caller has with/heap slots), sets the
// argument count when no adaptor is needed, performs the call, and records
// the safepoint. Expects the function object in r1.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to arguments count if adaption is not needed. Assumes that r0
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.
  __ SetCallKind(r5, call_kind);
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Setup deoptimization.
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2761
2762
// Calls a function known at compile time; materializes it into r1 as
// required by CallKnownFunction. Result arrives in r0.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->function()));
  CallKnownFunction(instr->function(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD);
}
2771
2772
// Deferred (non-Smi) path of Math.abs for a tagged input: deoptimizes unless
// the input is a heap number; positive numbers are returned unchanged, while
// negative numbers get a freshly allocated heap number with the sign bit of
// the exponent word cleared.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  // Reuse scratch0() as the exponent register and retire |scratch| so it
  // cannot be used by mistake below.
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  // Move the input to the result if necessary.
  __ Move(result, input);
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    // Clear the IEEE 754 sign bit and copy both words into the new number.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    __ StoreToSafepointRegisterSlot(tmp1, result);
  }

  __ bind(&done);
}
2837
2838
// Integer Math.abs: copies the input if non-negative, otherwise negates it
// with rsb, deoptimizing on overflow (abs(kMinInt) is not representable).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ cmp(input, Operand(0));
  __ Move(result, input, pl);
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(result, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
2851
2852
// Math.abs dispatch by representation: vabs for doubles, inline integer abs
// for int32, and for tagged values a Smi fast path with a deferred heap
// number slow path.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    DwVfpRegister result = ToDoubleRegister(instr->result());
    __ vabs(result, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2886
2887
// Implements Math.floor for a double input, producing an int32 result.
// Deoptimizes when the truncation is not exact/representable (per the
// flags set by EmitVFPTruncate) or when the result would be -0 and the
// hydrogen instruction bails out on minus zero.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  // Truncate towards minus infinity into the single-precision scratch.
  __ EmitVFPTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     scratch2);
  DeoptimizeIf(ne, instr->environment());

  // Move the result back to general purpose register r0.
  __ vmov(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    // Integer result is zero: check the sign bit of the original double.
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}
2916
2917
// Implements Math.round for a double input, producing an int32 result:
// add 0.5 and truncate towards minus infinity.  Deoptimizes for inputs
// outside ]-2^32, 2^32[, for inexact truncations, and for -0 results
// when the hydrogen instruction bails out on minus zero.
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  Label done, check_sign_on_zero;

  // Extract exponent bits.
  __ vmov(result, input.high());
  __ ubfx(scratch,
          result,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);

  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
  __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2));
  __ mov(result, Operand(0), LeaveCC, le);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ b(le, &check_sign_on_zero);
  } else {
    __ b(le, &done);
  }

  // The following conversion will not work with numbers
  // outside of ]-2^32, 2^32[.
  __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
  DeoptimizeIf(ge, instr->environment());

  // Save the original sign for later comparison.
  // (result still holds input.high() here.)
  __ and_(scratch, result, Operand(HeapNumber::kSignMask));

  __ Vmov(double_scratch0(), 0.5);
  __ vadd(double_scratch0(), input, double_scratch0());

  // Check sign of the result: if the sign changed, the input
  // value was in ]-0.5, 0[ and the result should be -0.
  __ vmov(result, double_scratch0().high());
  __ eor(result, result, Operand(scratch), SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(mi, instr->environment());
  } else {
    __ mov(result, Operand(0), LeaveCC, mi);
    __ b(mi, &done);
  }

  // Truncate input + 0.5 towards minus infinity; 'ne' after this means
  // the conversion was not exact and we must deoptimize.
  __ EmitVFPTruncate(kRoundToMinusInf,
                     double_scratch0().low(),
                     double_scratch0(),
                     result,
                     scratch);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ bind(&check_sign_on_zero);
    // Zero result: deoptimize if the input's sign bit is set (-0 case).
    __ vmov(scratch, input.high());
    __ tst(scratch, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ bind(&done);
}
2981
2982
Ben Murdochb0fe1622011-05-05 13:52:32 +01002983void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002984 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002985 DoubleRegister result = ToDoubleRegister(instr->result());
2986 __ vsqrt(result, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002987}
2988
2989
Steve Block44f0eee2011-05-26 01:26:41 +01002990void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2991 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002992 DoubleRegister result = ToDoubleRegister(instr->result());
Steve Block44f0eee2011-05-26 01:26:41 +01002993 // Add +0 to convert -0 to +0.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002994 __ vadd(result, input, kDoubleRegZero);
2995 __ vsqrt(result, result);
Steve Block44f0eee2011-05-26 01:26:41 +01002996}
2997
2998
// Implements Math.pow by calling out to a C function.  The exponent may
// be a double, an int32, or a tagged value (smi or heap number); the
// tagged case converts the exponent to a double first, deoptimizing if
// it is neither a smi nor a heap number.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  Register scratch = scratch0();
  DoubleRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(0, 2, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left),
                               ToDoubleRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 0, 2);
  } else if (exponent_type.IsInteger32()) {
    ASSERT(ToRegister(right).is(r0));
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(1, 1, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 1, 1);
  } else {
    ASSERT(exponent_type.IsTagged());
    ASSERT(instr->hydrogen()->left()->representation().IsDouble());

    Register right_reg = ToRegister(right);

    // Check for smi on the right hand side.
    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);

    // Untag smi and convert it to a double.
    __ SmiUntag(right_reg);
    SwVfpRegister single_scratch = double_scratch0().low();
    __ vmov(single_scratch, right_reg);
    __ vcvt_f64_s32(result_reg, single_scratch);
    __ jmp(&call);

    // Heap number map check.
    __ bind(&non_smi);
    __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(scratch, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    // Load the heap number's double payload into result_reg.
    int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
    __ add(scratch, right_reg, Operand(value_offset));
    __ vldr(result_reg, scratch, 0);

    // Prepare arguments and call C function.
    __ bind(&call);
    __ PrepareCallCFunction(0, 2, scratch);
    __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 0, 2);
  }
  // Store the result in the result register.
  __ GetCFunctionDoubleResult(result_reg);
}
3056
3057
// Math.log via the transcendental cache stub, operating on the untagged
// double in d2 (asserted below).
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3064
3065
// Math.cos via the transcendental cache stub, operating on the untagged
// double in d2 (asserted below).
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3072
3073
// Math.sin via the transcendental cache stub, operating on the untagged
// double in d2 (asserted below).
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3080
3081
// Dispatches a unary math operation to the corresponding emitter above.
void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;
    default:
      Abort("Unimplemented type of LUnaryMathOperation.");
      UNREACHABLE();
  }
}
3113
3114
// Invokes the JS function in r1 with the instruction's arity, recording
// a lazy-deopt safepoint, then restores the context register.
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(r1));
  ASSERT(instr->HasPointerMap());
  ASSERT(instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
  ParameterCount count(instr->arity());
  __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3126
3127
// Emits a keyed call (obj[expr](...)) through a keyed-call IC stub;
// the result arrives in r0.
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3137
3138
// Emits a named property call (obj.name(...)) through a call IC stub;
// the property name is passed in r2, the result arrives in r0.
void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, mode, instr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3151
3152
// Calls a function expression via CallFunctionStub; drops the function
// slot afterwards and restores the context register.  Result in r0.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  CallFunctionStub stub(arity, RECEIVER_MIGHT_BE_IMPLICIT);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ Drop(1);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3162
3163
// Calls a function looked up on the global object through a contextual
// call IC (CODE_TARGET_CONTEXT); name in r2, result in r0.
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, mode, instr);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3175
3176
// Calls a global function whose target is known at compile time,
// bypassing the IC machinery; the function object goes in r1.
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->target()));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
3182
3183
// Emits a 'new' expression: constructor in r1, argument count in r0,
// dispatched to the JSConstructCall builtin.  Result in r0.
void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->result()).is(r0));

  Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
  __ mov(r0, Operand(instr->arity()));
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
}
3192
3193
// Calls into the runtime with the instruction's function and arity.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3197
3198
// Stores a value into a named field, either in-object or in the
// out-of-line properties array, optionally writing a map transition
// first and emitting a write barrier when required.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  // If this store is accompanied by a map transition, install the new
  // map before storing the field.
  if (!instr->transition().is_null()) {
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}
3229
3230
// Generic named store via a StoreIC; object in r1, value in r0, name in
// r2.  The strict/non-strict IC variant matches the instruction's mode.
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
3242
3243
3244void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003245 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
Steve Block9fac8402011-05-12 15:51:54 +01003246 DeoptimizeIf(hs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003247}
3248
3249
// Stores a value into a fast-elements FixedArray backing store, with a
// constant-offset fast path and a write barrier when required.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // Constant keys never take the write-barrier path below, which
    // needs the key in a register.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
3274
3275
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003276void LCodeGen::DoStoreKeyedFastDoubleElement(
3277 LStoreKeyedFastDoubleElement* instr) {
3278 DwVfpRegister value = ToDoubleRegister(instr->value());
3279 Register elements = ToRegister(instr->elements());
3280 Register key = no_reg;
3281 Register scratch = scratch0();
3282 bool key_is_constant = instr->key()->IsConstantOperand();
3283 int constant_key = 0;
3284 Label not_nan;
3285
3286 // Calculate the effective address of the slot in the array to store the
3287 // double value.
3288 if (key_is_constant) {
3289 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3290 if (constant_key & 0xF0000000) {
3291 Abort("array index constant value too big.");
3292 }
3293 } else {
3294 key = ToRegister(instr->key());
3295 }
Ben Murdoch589d6972011-11-30 16:04:58 +00003296 int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003297 Operand operand = key_is_constant
3298 ? Operand(constant_key * (1 << shift_size) +
3299 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
3300 : Operand(key, LSL, shift_size);
3301 __ add(scratch, elements, operand);
3302 if (!key_is_constant) {
3303 __ add(scratch, scratch,
3304 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
3305 }
3306
3307 // Check for NaN. All NaNs must be canonicalized.
3308 __ VFPCompareAndSetFlags(value, value);
3309
3310 // Only load canonical NaN if the comparison above set the overflow.
3311 __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);
3312
3313 __ bind(&not_nan);
3314 __ vstr(value, scratch, 0);
3315}
3316
3317
// Stores a value into an external (typed) array.  Float and double
// kinds go through VFP registers; the integer kinds use a plain store
// of the appropriate width.
void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {

  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  // Element size (as a shift amount) for this kind of external array.
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister value(ToDoubleRegister(instr->value()));
    Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
                                    : Operand(key, LSL, shift_size));
    __ add(scratch0(), external_pointer, operand);
    if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
      // Narrow the double to single precision before storing.
      __ vcvt_f32_f64(double_scratch0().low(), value);
      __ vstr(double_scratch0().low(), scratch0(), 0);
    } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
      __ vstr(value, scratch0(), 0);
    }
  } else {
    Register value(ToRegister(instr->value()));
    MemOperand mem_operand(key_is_constant
        ? MemOperand(external_pointer, constant_key * (1 << shift_size))
        : MemOperand(external_pointer, key, LSL, shift_size));
    switch (elements_kind) {
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ strb(value, mem_operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ strh(value, mem_operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ str(value, mem_operand);
        break;
      // The floating-point kinds were handled above; the remaining
      // kinds never reach this instruction.
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
3379
3380
// Generic keyed store via a KeyedStoreIC; object in r2, key in r1,
// value in r0.  The strict/non-strict variant matches the instruction.
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r2));
  ASSERT(ToRegister(instr->key()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
3391
3392
Ben Murdoch257744e2011-11-30 15:57:28 +00003393void LCodeGen::DoStringAdd(LStringAdd* instr) {
3394 __ push(ToRegister(instr->left()));
3395 __ push(ToRegister(instr->right()));
3396 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3397 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3398}
3399
3400
// Implements String.charCodeAt: unwraps sliced and (flat) cons strings,
// then loads the character from a sequential ASCII or two-byte string.
// Any shape it cannot handle inline (non-flat cons, non-sequential
// string) falls back to the runtime via deferred code.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register string = ToRegister(instr->string());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ tst(result, Operand(kIsIndirectStringMask));
  __ b(eq, &check_sequential);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ tst(result, Operand(kSlicedNotConsMask));
  __ b(eq, &cons_string);

  // Handle slices.
  // Add the slice offset (a smi) to the index and continue on the
  // parent string.
  Label indirect_string_loaded;
  __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
  __ add(index, index, Operand(result, ASR, kSmiTagSize));
  __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded);

  // Handle conses.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(result, ip);
  __ b(ne, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));

  __ bind(&indirect_string_loaded);
  // Reload the instance type for the unwrapped string.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // Check whether the string is sequential. The only non-sequential
  // shapes we support have just been unwrapped above.
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, deferred->entry());

  // Dispatch on the encoding: ASCII or two-byte.
  Label ascii_string;
  STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii_string);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  Label done;
  __ add(result,
         string,
         Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ ldrh(result, MemOperand(result, index, LSL, 1));
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  __ add(result,
         string,
         Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ ldrb(result, MemOperand(result, index));

  __ bind(&done);
  __ bind(deferred->exit());
}
3490
3491
// Deferred (slow) path for DoStringCharCodeAt: calls the
// Runtime::kStringCharCodeAt function with the string and a smi-tagged
// index, then untags the smi result into the result register.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  __ SmiUntag(r0);
  // Transfer the untagged runtime result into the result register's
  // safepoint slot.
  __ StoreToSafepointRegisterSlot(r0, result);
}
3522
3523
// Implements String.fromCharCode for a single int32 char code: looks
// the character up in the single-character string cache, falling back
// to deferred code for codes above kMaxAsciiCharCode or cache misses
// (undefined cache entries).
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
  __ b(hi, deferred->entry());
  // Index into the single-character string cache.
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
  // An undefined entry means the string is not cached: take the slow path.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(result, ip);
  __ b(eq, deferred->entry());
  __ bind(deferred->exit());
}
3552
3553
// Deferred (slow) path for DoStringCharFromCode: calls the
// Runtime::kCharFromCode function with the smi-tagged char code.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  // Transfer the runtime result into the result register's safepoint slot.
  __ StoreToSafepointRegisterSlot(r0, result);
}
3569
3570
Steve Block1e0659c2011-05-24 12:43:12 +01003571void LCodeGen::DoStringLength(LStringLength* instr) {
3572 Register string = ToRegister(instr->InputAt(0));
3573 Register result = ToRegister(instr->result());
3574 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3575}
3576
3577
Ben Murdochb0fe1622011-05-05 13:52:32 +01003578void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003579 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003580 ASSERT(input->IsRegister() || input->IsStackSlot());
3581 LOperand* output = instr->result();
3582 ASSERT(output->IsDoubleRegister());
3583 SwVfpRegister single_scratch = double_scratch0().low();
3584 if (input->IsStackSlot()) {
3585 Register scratch = scratch0();
3586 __ ldr(scratch, ToMemOperand(input));
3587 __ vmov(single_scratch, scratch);
3588 } else {
3589 __ vmov(single_scratch, ToRegister(input));
3590 }
3591 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003592}
3593
3594
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  // Deferred path: boxes the integer in a heap number when it does not
  // fit in a smi.
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  // Tag in place; signed overflow (V set) means the value needs a heap
  // number instead.
  __ SmiTag(reg, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}
3614
3615
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  // Slow path of DoNumberTagI: the int32 overflowed the smi range, so
  // allocate a heap number for it.
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  DoubleRegister dbl_scratch = double_scratch0();
  SwVfpRegister flt_scratch = dbl_scratch.low();

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  __ SmiUntag(reg);
  // Untagging kept the old bit 30 in the sign position; flipping bit 31
  // restores the original int32 value.
  __ eor(reg, reg, Operand(0x80000000));
  // Convert to double in dbl_scratch; it is stored into the heap number
  // at the end, once one has been allocated.
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(ip, Operand(0));
  __ StoreToSafepointRegisterSlot(ip, reg);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  // Record the heap number in the safepoint slot so the GC sees a valid
  // pointer as the result.
  __ StoreToSafepointRegisterSlot(reg, reg);
}
3658
3659
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  // Deferred path: runtime allocation when inline new-space allocation
  // fails or is disabled.
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double value into the freshly allocated heap number.
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
}
3687
3688
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // Slow path of DoNumberTagD: allocate the heap number via the runtime.
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  // Preserve all registers across the runtime call.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ StoreToSafepointRegisterSlot(r0, reg);
}
3700
3701
3702void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003703 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003704 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3705 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3706 __ SmiTag(ToRegister(input));
3707}
3708
3709
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  // Untags in place; optionally deoptimizes when the input is not a smi.
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  if (instr->needs_check()) {
    STATIC_ASSERT(kHeapObjectTag == 1);
    // If the input is a HeapObject, SmiUntag will set the carry flag.
    __ SmiUntag(ToRegister(input), SetCC);
    // Carry set => heap object => deopt.
    DeoptimizeIf(cs, instr->environment());
  } else {
    __ SmiUntag(ToRegister(input));
  }
}
3722
3723
3724void LCodeGen::EmitNumberUntagD(Register input_reg,
3725 DoubleRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003726 bool deoptimize_on_undefined,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003727 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01003728 Register scratch = scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003729 SwVfpRegister flt_scratch = double_scratch0().low();
3730 ASSERT(!result_reg.is(double_scratch0()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003731
3732 Label load_smi, heap_number, done;
3733
3734 // Smi check.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003735 __ JumpIfSmi(input_reg, &load_smi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003736
3737 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01003738 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003739 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01003740 __ cmp(scratch, Operand(ip));
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003741 if (deoptimize_on_undefined) {
3742 DeoptimizeIf(ne, env);
3743 } else {
3744 Label heap_number;
3745 __ b(eq, &heap_number);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003746
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003747 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3748 __ cmp(input_reg, Operand(ip));
3749 DeoptimizeIf(ne, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003750
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003751 // Convert undefined to NaN.
3752 __ LoadRoot(ip, Heap::kNanValueRootIndex);
3753 __ sub(ip, ip, Operand(kHeapObjectTag));
3754 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3755 __ jmp(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003756
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003757 __ bind(&heap_number);
3758 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003759 // Heap number to double register conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003760 __ sub(ip, input_reg, Operand(kHeapObjectTag));
3761 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3762 __ jmp(&done);
3763
3764 // Smi to double register conversion
3765 __ bind(&load_smi);
3766 __ SmiUntag(input_reg); // Untag smi before converting to float.
3767 __ vmov(flt_scratch, input_reg);
3768 __ vcvt_f64_s32(result_reg, flt_scratch);
3769 __ SmiTag(input_reg); // Retag smi.
3770 __ bind(&done);
3771}
3772
3773
// Deferred code for LTaggedToI: handles the non-smi (heap number) case of
// the tagged-to-int32 conversion.
class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};
3782
3783
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  // Slow path of DoTaggedToI: the input was not a smi.  Restore the tag,
  // then convert the heap number (or, when truncating, undefined) to an
  // int32 in place.
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_scratch = double_scratch0();
  SwVfpRegister single_scratch = double_scratch.low();

  ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
  ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));

  Label done;

  // The input was optimistically untagged; revert it.
  // The carry flag is set when we reach this deferred code as we just executed
  // SmiUntag(heap_object, SetCC)
  STATIC_ASSERT(kHeapObjectTag == 1);
  // adc computes input*2 + carry(=1), restoring the tagged pointer.
  __ adc(input_reg, input_reg, Operand(input_reg));

  // Heap number map check.
  __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, Operand(ip));

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
    ASSERT(!scratch3.is(input_reg) &&
           !scratch3.is(scratch1) &&
           !scratch3.is(scratch2));
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    // Load the double value and apply the ECMA ToInt32 truncation.
    __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);

    __ EmitECMATruncate(input_reg,
                        double_scratch2,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);

  } else {
    CpuFeatures::Scope scope(VFP3);
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
    __ EmitVFPTruncate(kRoundToZero,
                       single_scratch,
                       double_scratch,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    // Deoptimize on an invalid or inexact conversion (the double was not
    // an exact int32).
    DeoptimizeIf(ne, instr->environment());
    // Load the result.
    __ vmov(input_reg, single_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result may have come from -0.0: check the sign bit of the
      // original double and deoptimize if it is set.
      __ cmp(input_reg, Operand(0));
      __ b(ne, &done);
      __ vmov(scratch1, double_scratch.high());
      __ tst(scratch1, Operand(HeapNumber::kSignMask));
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}
3863
3864
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  // Converts a tagged value to an int32, in place.  Smis are handled
  // inline; heap numbers go through DoDeferredTaggedToI.
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Optimistically untag the input.
  // If the input is a HeapObject, SmiUntag will set the carry flag.
  __ SmiUntag(input_reg, SetCC);
  // Branch to deferred code if the input was tagged.
  // The deferred code will take care of restoring the tag.
  __ b(cs, deferred->entry());
  __ bind(deferred->exit());
}
3882
3883
3884void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003885 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003886 ASSERT(input->IsRegister());
3887 LOperand* result = instr->result();
3888 ASSERT(result->IsDoubleRegister());
3889
3890 Register input_reg = ToRegister(input);
3891 DoubleRegister result_reg = ToDoubleRegister(result);
3892
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003893 EmitNumberUntagD(input_reg, result_reg,
3894 instr->hydrogen()->deoptimize_on_undefined(),
3895 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003896}
3897
3898
3899void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003900 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003901 Register scratch1 = scratch0();
3902 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003903 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003904 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01003905
Steve Block44f0eee2011-05-26 01:26:41 +01003906 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01003907
Steve Block44f0eee2011-05-26 01:26:41 +01003908 if (instr->truncating()) {
3909 Register scratch3 = ToRegister(instr->TempAt(1));
3910 __ EmitECMATruncate(result_reg,
3911 double_input,
3912 single_scratch,
3913 scratch1,
3914 scratch2,
3915 scratch3);
3916 } else {
3917 VFPRoundingMode rounding_mode = kRoundToMinusInf;
3918 __ EmitVFPTruncate(rounding_mode,
3919 single_scratch,
3920 double_input,
3921 scratch1,
3922 scratch2,
3923 kCheckForInexactConversion);
3924 // Deoptimize if we had a vfp invalid exception,
3925 // including inexact operation.
Steve Block1e0659c2011-05-24 12:43:12 +01003926 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003927 // Retrieve the result.
3928 __ vmov(result_reg, single_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01003929 }
Steve Block44f0eee2011-05-26 01:26:41 +01003930 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003931}
3932
3933
3934void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003935 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003936 __ tst(ToRegister(input), Operand(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003937 DeoptimizeIf(ne, instr->environment());
3938}
3939
3940
3941void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
3942 LOperand* input = instr->InputAt(0);
3943 __ tst(ToRegister(input), Operand(kSmiTagMask));
3944 DeoptimizeIf(eq, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003945}
3946
3947
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  // Deoptimizes unless the object's instance type satisfies the hydrogen
  // check: either a [first, last] interval test or a mask-and-tag test.
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the instance type byte from the object's map.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmp(scratch, Operand(first));

    // If there is only one type in the interval check for equality.
    if (first == last) {
      DeoptimizeIf(ne, instr->environment());
    } else {
      // Below the interval => deopt.
      DeoptimizeIf(lo, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmp(scratch, Operand(last));
        DeoptimizeIf(hi, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      // Single-bit mask: a tst suffices; the tag selects which flag value
      // deoptimizes.
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ tst(scratch, Operand(mask));
      DeoptimizeIf(tag == 0 ? ne : eq, instr->environment());
    } else {
      __ and_(scratch, scratch, Operand(mask));
      __ cmp(scratch, Operand(tag));
      DeoptimizeIf(ne, instr->environment());
    }
  }
}
3989
3990
3991void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003992 ASSERT(instr->InputAt(0)->IsRegister());
3993 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003994 __ cmp(reg, Operand(instr->hydrogen()->target()));
3995 DeoptimizeIf(ne, instr->environment());
3996}
3997
3998
3999void LCodeGen::DoCheckMap(LCheckMap* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01004000 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01004001 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004002 ASSERT(input->IsRegister());
4003 Register reg = ToRegister(input);
Steve Block9fac8402011-05-12 15:51:54 +01004004 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
4005 __ cmp(scratch, Operand(instr->hydrogen()->map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004006 DeoptimizeIf(ne, instr->environment());
4007}
4008
4009
Ben Murdoch257744e2011-11-30 15:57:28 +00004010void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
4011 DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
4012 Register result_reg = ToRegister(instr->result());
4013 DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
4014 __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
4015}
4016
4017
4018void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
4019 Register unclamped_reg = ToRegister(instr->unclamped());
4020 Register result_reg = ToRegister(instr->result());
4021 __ ClampUint8(result_reg, unclamped_reg);
4022}
4023
4024
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  // Clamps a tagged value (smi, heap number, or undefined) to the uint8
  // range.  Undefined clamps to zero; any other input deoptimizes.
  Register scratch = scratch0();
  Register input_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  Label is_smi, done, heap_number;

  // Both smi and heap number cases are handled.
  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(factory()->heap_number_map()));
  __ b(eq, &heap_number);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, Operand(factory()->undefined_value()));
  DeoptimizeIf(ne, instr->environment());
  __ mov(result_reg, Operand(0));
  __ jmp(&done);

  // Heap number
  __ bind(&heap_number);
  __ vldr(double_scratch0(), FieldMemOperand(input_reg,
                                             HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
  __ jmp(&done);

  // smi
  __ bind(&is_smi);
  __ SmiUntag(result_reg, input_reg);
  __ ClampUint8(result_reg, result_reg);

  __ bind(&done);
}
4061
4062
Ben Murdochb8e0da22011-05-16 14:20:40 +01004063void LCodeGen::LoadHeapObject(Register result,
4064 Handle<HeapObject> object) {
Steve Block44f0eee2011-05-26 01:26:41 +01004065 if (heap()->InNewSpace(*object)) {
Steve Block9fac8402011-05-12 15:51:54 +01004066 Handle<JSGlobalPropertyCell> cell =
Steve Block44f0eee2011-05-26 01:26:41 +01004067 factory()->NewJSGlobalPropertyCell(object);
Steve Block9fac8402011-05-12 15:51:54 +01004068 __ mov(result, Operand(cell));
Ben Murdochb8e0da22011-05-16 14:20:40 +01004069 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
Steve Block9fac8402011-05-12 15:51:54 +01004070 } else {
Ben Murdochb8e0da22011-05-16 14:20:40 +01004071 __ mov(result, Operand(object));
Steve Block9fac8402011-05-12 15:51:54 +01004072 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004073}
4074
4075
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  // Walks the prototype chain from instr->prototype() up to and including
  // instr->holder(), deoptimizing if any map along the way differs from
  // the one recorded at compile time.
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}
4102
4103
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  // Materializes an array literal: pushes the literals array, the literal
  // index, and the constant elements, then dispatches to the fastest
  // applicable clone stub or runtime function.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    // Copy-on-write elements only occur at depth 1.
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals go through the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    // Too long for the stub's inline copy.
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4130
4131
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  // Materializes an object literal: pushes the literals array, literal
  // index, constant properties and a fast-elements flag, then calls the
  // appropriate runtime function.
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}
4147
4148
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  // Transition the object's properties to fast mode via the runtime.
  // The input is expected in r0, which also receives the result.
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
4154
4155
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  // Materializes a regexp literal: fetch the cached literal from the
  // literals array (creating it via the runtime on first use), then make
  // a shallow copy of it in new space.
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  // An undefined slot means the literal has not been created yet.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  // Try inline allocation of the clone; fall back to the runtime.
  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  // Copy the trailing word when the size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
4209
4210
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    // Fast path: stub allocation, parameterized by strict mode.
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Slow path: runtime call with context, shared info and a boolean
    // pretenure flag.
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
4231
4232
4233void LCodeGen::DoTypeof(LTypeof* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004234 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004235 __ push(input);
4236 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004237}
4238
4239
Ben Murdochb0fe1622011-05-05 13:52:32 +01004240void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004241 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004242 int true_block = chunk_->LookupDestination(instr->true_block_id());
4243 int false_block = chunk_->LookupDestination(instr->false_block_id());
4244 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4245 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4246
4247 Condition final_branch_condition = EmitTypeofIs(true_label,
4248 false_label,
4249 input,
4250 instr->type_literal());
4251
4252 EmitBranch(true_block, false_block, final_branch_condition);
4253}
4254
4255
// Emits the comparison for "typeof input == type_name".  Depending on the
// type literal it may jump directly to true_label or false_label; otherwise
// it leaves the flags set and returns the condition under which the
// comparison holds.  Note: the input register is clobbered (its map is
// loaded into it in several branches).
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis and heap numbers are "number".
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    // Non-smi with a string instance type, and not undetectable
    // (the undetectable bit must be clear => eq after the tst).
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // Exactly the true or the false value.
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    // "null" only exists as a typeof result under harmony typeof.
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // Any callable spec object (instance type at or above the first
    // callable type) is "function".
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch,
                         FIRST_CALLABLE_SPEC_OBJECT_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
      // In classic mode typeof null is "object".
      __ CompareRoot(input, Heap::kNullValueRootIndex);
      __ b(eq, true_label);
    }
    // Instance type must lie in the non-callable spec-object range.
    __ CompareObjectType(input, input, scratch,
                         FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, false_label);
    __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    // Unknown type literal: the comparison can never succeed.
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
4327
4328
Steve Block1e0659c2011-05-24 12:43:12 +01004329void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4330 Register temp1 = ToRegister(instr->TempAt(0));
4331 int true_block = chunk_->LookupDestination(instr->true_block_id());
4332 int false_block = chunk_->LookupDestination(instr->false_block_id());
4333
4334 EmitIsConstructCall(temp1, scratch0());
4335 EmitBranch(true_block, false_block, eq);
4336}
4337
4338
// Emits code that compares the calling frame's marker against the
// CONSTRUCT frame marker; afterwards 'eq' holds iff the current function
// was invoked as a construct call.  Clobbers both temp registers.
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4356
4357
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004358void LCodeGen::EnsureSpaceForLazyDeopt() {
4359 // Ensure that we have enough space after the previous lazy-bailout
4360 // instruction for patching the code here.
4361 int current_pc = masm()->pc_offset();
4362 int patch_size = Deoptimizer::patch_size();
4363 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4364 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4365 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
4366 while (padding_size > 0) {
4367 __ nop();
4368 padding_size -= Assembler::kInstrSize;
4369 }
4370 }
4371 last_lazy_deopt_pc_ = masm()->pc_offset();
4372}
4373
4374
Ben Murdochb0fe1622011-05-05 13:52:32 +01004375void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004376 EnsureSpaceForLazyDeopt();
4377 ASSERT(instr->HasEnvironment());
4378 LEnvironment* env = instr->environment();
4379 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4380 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004381}
4382
4383
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  // 'al' (always) makes this an unconditional deoptimization.
  DeoptimizeIf(al, instr->environment());
}
4387
4388
4389void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01004390 Register object = ToRegister(instr->object());
4391 Register key = ToRegister(instr->key());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004392 Register strict = scratch0();
4393 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
4394 __ Push(object, key, strict);
Steve Block1e0659c2011-05-24 12:43:12 +01004395 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4396 LPointerMap* pointers = instr->pointer_map();
Steve Block1e0659c2011-05-24 12:43:12 +01004397 RecordPosition(pointers->position());
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004398 SafepointGenerator safepoint_generator(
4399 this, pointers, Safepoint::kLazyDeopt);
Ben Murdoch257744e2011-11-30 15:57:28 +00004400 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
4401}
4402
4403
4404void LCodeGen::DoIn(LIn* instr) {
4405 Register obj = ToRegister(instr->object());
4406 Register key = ToRegister(instr->key());
4407 __ Push(key, obj);
4408 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4409 LPointerMap* pointers = instr->pointer_map();
Ben Murdoch257744e2011-11-30 15:57:28 +00004410 RecordPosition(pointers->position());
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004411 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
Ben Murdoch257744e2011-11-30 15:57:28 +00004412 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004413}
4414
4415
// Out-of-line part of DoStackCheck: calls the StackGuard runtime function
// with all registers saved and records a safepoint that supports lazy
// deoptimization.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  // Save/restore all registers around the runtime call (RAII scope).
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
4425
4426
// Emits a stack-overflow check, either inline at function entry (calling
// the StackCheckStub) or as deferred code on a backwards branch (calling
// the StackGuard runtime function via DoDeferredStackCheck).
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Deferred code that performs the actual stack-guard runtime call when
  // the fast inline limit check fails on a backwards branch.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LStackCheck* instr_;
  };

  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    // hs: sp >= stack limit, so no overflow — skip the stub call.
    __ b(hs, &done);
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    EnsureSpaceForLazyDeopt();
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    // lo: sp < stack limit — jump to the deferred runtime call.
    __ b(lo, deferred_stack_check->entry());
    EnsureSpaceForLazyDeopt();
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting call and the safepoint in
    // the deferred code.
  }
}
4470
4471
4472void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004473 // This is a pseudo-instruction that ensures that the environment here is
4474 // properly registered for deoptimization and records the assembler's PC
4475 // offset.
4476 LEnvironment* environment = instr->environment();
4477 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4478 instr->SpilledDoubleRegisterArray());
4479
4480 // If the environment were already registered, we would have no way of
4481 // backpatching it with the spill slot operands.
4482 ASSERT(!environment->HasBeenRegistered());
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004483 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
Steve Block1e0659c2011-05-24 12:43:12 +01004484 ASSERT(osr_pc_offset_ == -1);
4485 osr_pc_offset_ = masm()->pc_offset();
Ben Murdochb0fe1622011-05-05 13:52:32 +01004486}
4487
4488
Ben Murdoch257744e2011-11-30 15:57:28 +00004489
4490
Ben Murdochb0fe1622011-05-05 13:52:32 +01004491#undef __
4492
4493} } // namespace v8::internal