blob: 82b80a2b802d685c5c8ba400bf9a1bb58da7b3ba [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Steve Block44f0eee2011-05-26 01:26:41 +010028#include "v8.h"
29
Ben Murdochb0fe1622011-05-05 13:52:32 +010030#include "arm/lithium-codegen-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +010031#include "arm/lithium-gap-resolver-arm.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "code-stubs.h"
33#include "stub-cache.h"
34
35namespace v8 {
36namespace internal {
37
38
// CallWrapper that records a safepoint immediately after a generated call
// instruction.  BeforeCall emits nothing; AfterCall records the safepoint
// using the pointer map and deopt mode captured at construction time.
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) { }
  virtual ~SafepointGenerator() { }

  // No code is needed ahead of the call site.
  virtual void BeforeCall(int call_size) const { }

  // Record the safepoint at the return address of the call.
  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;                // The owning code generator (not owned).
  LPointerMap* pointers_;            // Live tagged values at the call.
  Safepoint::DeoptMode deopt_mode_;  // Whether lazy deopt is possible here.
};
60
61
62#define __ masm()->
63
// Top-level driver for code generation: emits prologue, instruction body,
// deferred code, the deoptimization jump table and the safepoint table.
// Returns false if any stage aborted.
bool LCodeGen::GenerateCode() {
  HPhase phase("Z_Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  // Optimized ARM code assumes VFP3 and ARMv7 support for the whole object.
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);

  CodeStub::GenerateFPStubs();

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // NONE indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::NONE);

  // Short-circuit && stops at the first stage that aborts.
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateDeoptJumpTable() &&
      GenerateSafepointTable();
}
84
85
// Attaches generation-time metadata (stack slot count, safepoint table
// offset, deoptimization data) to the finished Code object.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}
92
93
// Aborts code generation with a printf-style reason.  The message is only
// printed under --trace-bailout; the ABORTED status is set unconditionally.
void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}
107
108
// Emits a printf-style comment into the generated code stream.  No-op unless
// --code-comments is enabled.
void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}
125
126
// Emits the function prologue: receiver fix-up for non-classic-mode calls,
// frame construction, stack slot reservation, optional local context
// allocation (copying context-allocated parameters in), and call tracing.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // Allow breaking into a named function under --stop_at in debug builds.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). r5 is zero for method calls and non-zero for
  // function calls.
  if (!info_->is_classic_mode() || info_->is_native()) {
    Label ok;
    __ cmp(r5, Operand(0));
    __ b(eq, &ok);
    // The receiver sits just above the parameters on the caller's stack.
    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Build the standard frame: push function, context, caller fp and lr.
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Fill the reserved slots with a recognizable zap value to catch
      // reads of uninitialized slots.
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Context is returned in both r0 and cp.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);
        // Update the write barrier. This clobbers r3 and r0.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kSaveFPRegs);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
217
218
// Emits native code for each lithium instruction in the chunk, in order.
// A label with a replacement suppresses emission until the next label,
// since its block was merged into the replacement's block.
bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  // Guarantee enough room to patch in a lazy-deopt call after the last
  // instruction.
  EnsureSpaceForLazyDeopt();
  return !is_aborted();
}
239
240
// Emits all deferred code sections (out-of-line slow paths).  Each section
// is bound at its entry label, generated, and jumps back to its exit label.
bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      code->Generate();
      __ jmp(code->exit());
    }
  }

  // Force constant pool emission at the end of the deferred code to make
  // sure that no constant pools are emitted after.
  masm()->CheckConstPool(true, false);

  return !is_aborted();
}
261
262
// Emits the deoptimization jump table at the end of the function: for each
// entry, a pc-relative load of the inlined 32-bit deopt entry address into
// pc.  Aborts if the table would be out of branch range.
bool LCodeGen::GenerateDeoptJumpTable() {
  // Check that the jump table is accessible from everywhere in the function
  // code, i.e. that offsets to the table can be encoded in the 24bit signed
  // immediate of a branch instruction.
  // To simplify we consider the code size from the first instruction to the
  // end of the jump table. We also don't consider the pc load delta.
  // Each entry in the jump table generates one instruction and inlines one
  // 32bit data after it.
  if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
      deopt_jump_table_.length() * 2)) {
    Abort("Generated code is too large");
  }

  // Block the constant pool emission during the jump table emission.
  __ BlockConstPoolFor(deopt_jump_table_.length());
  __ RecordComment("[ Deoptimisation jump table");
  Label table_start;
  __ bind(&table_start);
  for (int i = 0; i < deopt_jump_table_.length(); i++) {
    __ bind(&deopt_jump_table_[i].label);
    // Load the deopt entry address (the word following this instruction)
    // directly into pc, i.e. an indirect jump.
    __ ldr(pc, MemOperand(pc, Assembler::kInstrSize - Assembler::kPcLoadDelta));
    __ dd(reinterpret_cast<uint32_t>(deopt_jump_table_[i].address));
  }
  // Exactly two words (one instruction + one data word) per entry.
  ASSERT(masm()->InstructionsGeneratedSince(&table_start) ==
      deopt_jump_table_.length() * 2);
  __ RecordComment("]");

  // The deoptimization jump table is the last part of the instruction
  // sequence. Mark the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
295
296
// Emits the safepoint table after all code has been generated.
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
302
303
// Maps a register allocator index to the corresponding ARM core register.
Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}
307
308
// Maps a register allocator index to the corresponding VFP double register.
DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}
312
313
// Returns the core register assigned to a register operand.
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}
318
319
// Materializes an operand's value in a core register.  Register operands
// are returned directly; constants and stack slots are loaded into
// |scratch| (double constants are not supported and abort).
Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
    } else if (r.IsDouble()) {
      Abort("EmitLoadRegister: Unsupported double immediate.");
    } else {
      ASSERT(r.IsTagged());
      if (literal->IsSmi()) {
        // Smis can be encoded as immediates; heap objects need a
        // relocatable load.
        __ mov(scratch, Operand(literal));
      } else {
        __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal));
      }
    }
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}
348
349
// Returns the VFP double register assigned to a double register operand.
DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}
354
355
// Materializes an operand's value in a double register.  Double register
// operands are returned directly; integer constants are converted via
// |flt_scratch| into |dbl_scratch|; stack slots are loaded into
// |dbl_scratch|.  Double and tagged immediates are unsupported and abort.
DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      // int32 -> VFP single -> VFP double conversion.
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}
386
387
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100388Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
389 Handle<Object> literal = chunk_->LookupLiteral(op);
390 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
391 return literal;
392}
393
394
// True if the constant operand holds an untagged 32-bit integer value.
bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsInteger32();
}
398
399
Ben Murdochb0fe1622011-05-05 13:52:32 +0100400int LCodeGen::ToInteger32(LConstantOperand* op) const {
401 Handle<Object> value = chunk_->LookupLiteral(op);
402 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
403 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
404 value->Number());
405 return static_cast<int32_t>(value->Number());
406}
407
408
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100409double LCodeGen::ToDouble(LConstantOperand* op) const {
410 Handle<Object> value = chunk_->LookupLiteral(op);
411 return value->Number();
412}
413
414
// Converts an LOperand into an assembler Operand.  Supports integer and
// tagged constants and registers; double immediates and double registers
// abort, and stack slots must use ToMemOperand instead.
Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}
438
439
440MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100441 ASSERT(!op->IsRegister());
442 ASSERT(!op->IsDoubleRegister());
443 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
444 int index = op->index();
445 if (index >= 0) {
446 // Local or spill slot. Skip the frame pointer, function, and
447 // context in the fixed part of the frame.
448 return MemOperand(fp, -(index + 3) * kPointerSize);
449 } else {
450 // Incoming parameter. Skip the return address.
451 return MemOperand(fp, -(index - 1) * kPointerSize);
452 }
453}
454
455
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100456MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
457 ASSERT(op->IsDoubleStackSlot());
458 int index = op->index();
459 if (index >= 0) {
460 // Local or spill slot. Skip the frame pointer, function, context,
461 // and the first word of the double in the fixed part of the frame.
462 return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
463 } else {
464 // Incoming parameter. Skip the return address and the first word of
465 // the double.
466 return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
467 }
468}
469
470
// Recursively writes the deoptimization translation for an environment
// chain, outermost frame first.  Each frame begins with a frame-type
// command followed by one command per environment value; spilled
// register values are emitted as duplicates ahead of the original.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  // Outer environments are written first (recursion bottoms out at NULL).
  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
    default:
      UNREACHABLE();
  }
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
520
521
// Appends one translation command describing where the deoptimizer can
// find |op|'s value (stack slot, register, or literal) and whether it is
// tagged.  A NULL operand stands for the arguments object.
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    // Pushed arguments live above the spill slots in the frame.
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
560
561
// Calls a code object and records a simple safepoint for |instr|.
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}
567
568
// Calls a code object, recording the source position and a lazy-deopt
// safepoint in the requested mode.
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}
586
587
// Calls a runtime function and records a simple lazy-deopt safepoint.
void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}
599
600
// Calls a runtime function from deferred code with doubles saved, and
// records a safepoint-with-registers (no lazy deopt in deferred code).
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
608
609
// Registers an environment for deoptimization, writing its translation
// once and assigning it a deoptimization index.  Idempotent for an
// already-registered environment.
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                                    Safepoint::DeoptMode mode) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
      if (e->frame_type() == JS_FUNCTION) {
        ++jsframe_count;
      }
    }
    Translation translation(&translations_, frame_count, jsframe_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    // Only lazy deopts record a pc offset; eager deopts use -1.
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment);
  }
}
644
645
// Emits an eager deoptimization exit taken when |cc| holds.  Unconditional
// deopts jump directly to the deopt entry; conditional ones branch to a
// (shared where possible) deopt jump table entry.
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  // --deopt-every-n-times=1: force an unconditional deopt for this id.
  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (FLAG_trap_on_deopt) __ stop("trap_on_deopt", cc);

  if (cc == al) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (deopt_jump_table_.is_empty() ||
        (deopt_jump_table_.last().address != entry)) {
      deopt_jump_table_.Add(JumpTableEntry(entry));
    }
    __ b(cc, &deopt_jump_table_.last().label);
  }
}
678
679
// Builds the DeoptimizationInputData array (translations, literals, OSR
// info, and one entry per registered environment) and attaches it to the
// code object.  No-op when nothing was registered.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}
711
712
713int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
714 int result = deoptimization_literals_.length();
715 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
716 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
717 }
718 deoptimization_literals_.Add(literal);
719 return result;
720}
721
722
723void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
724 ASSERT(deoptimization_literals_.length() == 0);
725
726 const ZoneList<Handle<JSFunction> >* inlined_closures =
727 chunk()->inlined_closures();
728
729 for (int i = 0, length = inlined_closures->length();
730 i < length;
731 i++) {
732 DefineDeoptimizationLiteral(inlined_closures->at(i));
733 }
734
735 inlined_function_count_ = deoptimization_literals_.length();
736}
737
738
// Records a lazy-deopt safepoint for |instr| in the requested mode:
// either a simple safepoint or one that also describes registers.
void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}
749
750
// Core safepoint recorder: defines a safepoint of the given kind and
// registers every live tagged stack slot (and, for register safepoints,
// every live tagged register plus cp) from the pointer map.
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}
774
775
// Records a simple safepoint (no registers, no arguments).
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}
780
781
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000782void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100783 LPointerMap empty_pointers(RelocInfo::kNoPosition);
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000784 RecordSafepoint(&empty_pointers, deopt_mode);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100785}
786
787
// Convenience overload: records a safepoint that also describes the saved
// general-purpose register state.
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(
      pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}
794
795
// Convenience overload: records a safepoint that describes both the saved
// general-purpose and the saved double (VFP) register state.
void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(
      pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
}
803
804
Ben Murdochb0fe1622011-05-05 13:52:32 +0100805void LCodeGen::RecordPosition(int position) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000806 if (position == RelocInfo::kNoPosition) return;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100807 masm()->positions_recorder()->RecordPosition(position);
808}
809
810
811void LCodeGen::DoLabel(LLabel* label) {
812 if (label->is_loop_header()) {
813 Comment(";;; B%d - LOOP entry", label->block_id());
814 } else {
815 Comment(";;; B%d", label->block_id());
816 }
817 __ bind(label->label());
818 current_block_ = label->block_id();
Ben Murdoch257744e2011-11-30 15:57:28 +0000819 DoGap(label);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100820}
821
822
// Emits the code for one parallel move by delegating to the gap resolver,
// which serializes the moves into a safe sequential order.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
826
827
828void LCodeGen::DoGap(LGap* gap) {
829 for (int i = LGap::FIRST_INNER_POSITION;
830 i <= LGap::LAST_INNER_POSITION;
831 i++) {
832 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
833 LParallelMove* move = gap->GetParallelMove(inner_pos);
834 if (move != NULL) DoParallelMove(move);
835 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100836}
837
838
// An instruction gap is handled exactly like a plain gap.
void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}
842
843
// Parameters are already in their frame slots on function entry, so no code
// needs to be emitted for them.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
847
848
// Calls one of a fixed set of code stubs on behalf of an LCallStub
// instruction; the stub's result is expected in r0.  Which stub is selected
// is determined by the hydrogen instruction's major key.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // The stub expects its (tagged) argument in r0; it is on top of the
      // stack at this point.
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
893
894
// OSR values are materialized by the on-stack-replacement machinery itself;
// no code is emitted here.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
898
899
// Emits code for integer modulus (x % y).
//  - Power-of-two constant divisors get a fast mask-based sequence.
//  - Otherwise a few fast paths are tried inline (zero dividend, dividend
//    smaller than divisor, power-of-two divisor detected at runtime, a
//    short unrolled subtraction loop), falling back to a VFP
//    divide/multiply/subtract sequence for the general case.
// Deoptimizes on x % 0 (when the divisor can be zero) and on a -0 result
// when the instruction must bail out on minus zero.
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    // Fast case: |divisor| is a power of two, so the remainder is just the
    // low bits of the dividend (negated again for negative dividends).
    Register dividend = ToRegister(instr->InputAt(0));
    Register result = ToRegister(instr->result());

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    // The sign of the divisor does not affect the result's magnitude.
    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    // Negative dividend: compute |dividend| & (divisor - 1), then negate.
    __ rsb(result, dividend, Operand(0));
    __ and_(result, result, Operand(divisor - 1), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero remainder with a negative dividend would be -0.
      DeoptimizeIf(eq, instr->environment());
    }
    __ rsb(result, result, Operand(0));
    __ b(&done);
    __ bind(&positive_dividend);
    __ and_(result, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  __ Move(result, left);

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  // Negative dividends go straight to the general VFP path.
  __ b(lt, &vfp_modulo);

  // If 0 <= left < right, the result is left itself (already in result).
  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the right hand side is smaller than the (nonnegative)
  // left hand side, the left hand side is the result.
  // Else try a few subtractions of the left hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less or equal than the
    // right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value is preloaded before. Be careful that 'right' is only live
  // on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}
1035
1036
// Emits code for integer division (x / y).  Division by 1, 2, and 4 is
// handled inline (using the tst to check that the division is exact);
// everything else tags the operands as Smis and calls the generic binary-op
// stub via deferred code.  Deoptimizes on division by zero, on (0 / -x)
// producing -0, on (kMinInt / -1) overflow, and whenever the result is not
// a small integer.
void LCodeGen::DoDivI(LDivI* instr) {
  // Deferred code that calls the generic DIV stub for the slow path.
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
    virtual LInstruction* instr() { return instr_; }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1): the true quotient does not fit in an int32.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.  The conditional tst only executes
  // when the divisor matched, and requires the low bits of the dividend to
  // be zero (i.e. the division to be exact) for the shifted move to apply.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged to Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}
1116
1117
// Deferred slow path shared by the binary integer operations: moves the two
// operands into the stub's expected registers (left in r1, right in r0),
// calls the generic BinaryOpStub, and stores the stub's result back into
// the safepoint slot for r0.  Registers and doubles are saved around the
// call by PushSafepointRegistersScope.
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.  The case analysis
  // avoids clobbering either operand before it has been read.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  BinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoLazyDeopt);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
1147
1148
// Emits code for integer multiplication.  When the right operand is a
// constant and no overflow check is required, -1/0/1 and (powers of two
// plus or minus one) are strength-reduced to shifts/adds; otherwise a plain
// mul — or smull with an overflow check — is emitted.  Deoptimizes when an
// overflow or a -0 result must be caught.
void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());
  // Note that result may alias left.
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right_op = instr->InputAt(1);

  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  bool bailout_on_minus_zero =
      instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);

  if (right_op->IsConstantOperand() && !can_overflow) {
    // Use optimized code for specific constants.
    int32_t constant = ToInteger32(LConstantOperand::cast(right_op));

    if (bailout_on_minus_zero && (constant < 0)) {
      // The case of a zero constant is handled separately below.
      // If the constant is negative and left is zero, the result would
      // be -0, which an int32 cannot represent.
      __ cmp(left, Operand(0));
      DeoptimizeIf(eq, instr->environment());
    }

    switch (constant) {
      case -1:
        __ rsb(result, left, Operand(0));
        break;
      case 0:
        if (bailout_on_minus_zero) {
          // If left is strictly negative and the constant is zero, the
          // result is -0. Deoptimize if required, otherwise return 0.
          __ cmp(left, Operand(0));
          DeoptimizeIf(mi, instr->environment());
        }
        __ mov(result, Operand(0));
        break;
      case 1:
        __ Move(result, left);
        break;
      default:
        // Multiplying by powers of two and powers of two plus or minus
        // one can be done faster with shifted operands.
        // For other constants we emit standard code.
        int32_t mask = constant >> 31;
        uint32_t constant_abs = (constant + mask) ^ mask;

        if (IsPowerOf2(constant_abs) ||
            IsPowerOf2(constant_abs - 1) ||
            IsPowerOf2(constant_abs + 1)) {
          if (IsPowerOf2(constant_abs)) {
            int32_t shift = WhichPowerOf2(constant_abs);
            __ mov(result, Operand(left, LSL, shift));
          } else if (IsPowerOf2(constant_abs - 1)) {
            int32_t shift = WhichPowerOf2(constant_abs - 1);
            __ add(result, left, Operand(left, LSL, shift));
          } else if (IsPowerOf2(constant_abs + 1)) {
            int32_t shift = WhichPowerOf2(constant_abs + 1);
            __ rsb(result, left, Operand(left, LSL, shift));
          }

          // Correct the sign of the result if the constant is negative.
          if (constant < 0) __ rsb(result, result, Operand(0));

        } else {
          // Generate standard code.
          __ mov(ip, Operand(constant));
          __ mul(result, left, ip);
        }
    }

  } else {
    Register right = EmitLoadRegister(right_op, scratch);
    if (bailout_on_minus_zero) {
      // Save left | right so the sign of a zero result can be checked
      // after the multiplication clobbers the inputs.
      __ orr(ToRegister(instr->TempAt(0)), left, right);
    }

    if (can_overflow) {
      // scratch:result = left * right.
      // The product fits in 32 bits iff the high word equals the sign
      // extension of the low word.
      __ smull(result, scratch, left, right);
      __ cmp(scratch, Operand(result, ASR, 31));
      DeoptimizeIf(ne, instr->environment());
    } else {
      __ mul(result, left, right);
    }

    if (bailout_on_minus_zero) {
      // Bail out if the result is supposed to be negative zero.
      Label done;
      __ cmp(result, Operand(0));
      __ b(ne, &done);
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
      __ bind(&done);
    }
  }
}
1244
1245
1246void LCodeGen::DoBitI(LBitI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001247 LOperand* left_op = instr->InputAt(0);
1248 LOperand* right_op = instr->InputAt(1);
1249 ASSERT(left_op->IsRegister());
1250 Register left = ToRegister(left_op);
1251 Register result = ToRegister(instr->result());
1252 Operand right(no_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01001253
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001254 if (right_op->IsStackSlot() || right_op->IsArgument()) {
1255 right = Operand(EmitLoadRegister(right_op, ip));
Steve Block44f0eee2011-05-26 01:26:41 +01001256 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001257 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
1258 right = ToOperand(right_op);
Steve Block44f0eee2011-05-26 01:26:41 +01001259 }
1260
Ben Murdochb0fe1622011-05-05 13:52:32 +01001261 switch (instr->op()) {
1262 case Token::BIT_AND:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001263 __ and_(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001264 break;
1265 case Token::BIT_OR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001266 __ orr(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001267 break;
1268 case Token::BIT_XOR:
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001269 __ eor(result, left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001270 break;
1271 default:
1272 UNREACHABLE();
1273 break;
1274 }
1275}
1276
1277
// Emits code for the shift operations SAR, SHR, and SHL, with the shift
// amount either in a register (masked to 5 bits) or as a constant.  A SHR
// that could produce a value outside the signed int32 range deoptimizes
// when required.
void LCodeGen::DoShiftI(LShiftI* instr) {
  // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
  // result may alias either of them.
  LOperand* right_op = instr->InputAt(1);
  Register left = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  if (right_op->IsRegister()) {
    // Mask the right_op operand.
    __ and_(scratch, ToRegister(right_op), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(left, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          // A logical shift result with the sign bit set cannot be
          // represented as a signed int32; SetCC exposes it as mi.
          __ mov(result, Operand(left, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(left, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(left, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Mask the right_op operand.
    int value = ToInteger32(LConstantOperand::cast(right_op));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, ASR, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSR, shift_count));
        } else {
          // Shift by zero: the value passes through unchanged, but a set
          // sign bit would make the "unsigned" result unrepresentable.
          if (instr->can_deopt()) {
            __ tst(left, Operand(0x80000000));
            DeoptimizeIf(ne, instr->environment());
          }
          __ Move(result, left);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(left, LSL, shift_count));
        } else {
          __ Move(result, left);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1343
1344
1345void LCodeGen::DoSubI(LSubI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01001346 LOperand* left = instr->InputAt(0);
1347 LOperand* right = instr->InputAt(1);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001348 LOperand* result = instr->result();
Steve Block44f0eee2011-05-26 01:26:41 +01001349 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1350 SBit set_cond = can_overflow ? SetCC : LeaveCC;
1351
1352 if (right->IsStackSlot() || right->IsArgument()) {
1353 Register right_reg = EmitLoadRegister(right, ip);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001354 __ sub(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001355 } else {
1356 ASSERT(right->IsRegister() || right->IsConstantOperand());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001357 __ sub(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
Steve Block44f0eee2011-05-26 01:26:41 +01001358 }
1359
1360 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001361 DeoptimizeIf(vs, instr->environment());
1362 }
1363}
1364
1365
// Materializes an integer constant into the result register.
void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}
1370
1371
1372void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001373 ASSERT(instr->result()->IsDoubleRegister());
1374 DwVfpRegister result = ToDoubleRegister(instr->result());
1375 double v = instr->value();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001376 __ Vmov(result, v);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001377}
1378
1379
1380void LCodeGen::DoConstantT(LConstantT* instr) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001381 Handle<Object> value = instr->value();
1382 if (value->IsSmi()) {
1383 __ mov(ToRegister(instr->result()), Operand(value));
1384 } else {
1385 __ LoadHeapObject(ToRegister(instr->result()),
1386 Handle<HeapObject>::cast(value));
1387 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001388}
1389
1390
Steve Block9fac8402011-05-12 15:51:54 +01001391void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001392 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001393 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001394 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
1395}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001396
Ben Murdochb0fe1622011-05-05 13:52:32 +01001397
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001398void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001399 Register result = ToRegister(instr->result());
1400 Register array = ToRegister(instr->InputAt(0));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00001401 __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001402}
1403
1404
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001405void LCodeGen::DoElementsKind(LElementsKind* instr) {
1406 Register result = ToRegister(instr->result());
1407 Register input = ToRegister(instr->InputAt(0));
1408
1409 // Load map into |result|.
1410 __ ldr(result, FieldMemOperand(input, HeapObject::kMapOffset));
1411 // Load the map's "bit field 2" into |result|. We only need the first byte,
1412 // but the following bit field extraction takes care of that anyway.
1413 __ ldr(result, FieldMemOperand(result, Map::kBitField2Offset));
1414 // Retrieve elements_kind from bit field 2.
1415 __ ubfx(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
1416}
1417
1418
// Implements the [[PrimitiveValue]] access used by Object.prototype.valueOf:
// Smis and non-JSValue objects are returned unchanged; for a JSValue
// wrapper the wrapped value is loaded.  Uses conditional Moves so each path
// writes the result exactly once.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ Move(result, input, eq);
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ Move(result, input, ne);
  __ b(ne, &done);
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1438
1439
// Loads a field of a JSDate object.  Field 0 (the time value) is read
// directly.  Other cached fields are read inline when the date cache stamp
// is still valid; otherwise (and for uncached fields) the C function
// get_date_field_function is called.  The object is expected (and the
// result returned) in r0.
void LCodeGen::DoDateField(LDateField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = ToRegister(instr->TempAt(0));
  Smi* index = instr->index();
  Label runtime, done;
  ASSERT(object.is(result));
  ASSERT(object.is(r0));
  ASSERT(!scratch.is(scratch0()));
  ASSERT(!scratch.is(object));

#ifdef DEBUG
  __ AbortIfSmi(object);
  __ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE);
  __ Assert(eq, "Trying to get date field from non-date.");
#endif

  if (index->value() == 0) {
    __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the global date cache stamp matches
      // the stamp stored in the date object.
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand(stamp));
      __ ldr(scratch, MemOperand(scratch));
      __ ldr(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset));
      __ cmp(scratch, scratch0());
      __ b(ne, &runtime);
      __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
                                             kPointerSize * index->value()));
      __ jmp(&done);
    }
    // Slow path: ask the runtime's date code for the field.
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(r1, Operand(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }
}
1478
1479
Ben Murdochb0fe1622011-05-05 13:52:32 +01001480void LCodeGen::DoBitNotI(LBitNotI* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001481 Register input = ToRegister(instr->InputAt(0));
1482 Register result = ToRegister(instr->result());
1483 __ mvn(result, Operand(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001484}
1485
1486
1487void LCodeGen::DoThrow(LThrow* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001488 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001489 __ push(input_reg);
1490 CallRuntime(Runtime::kThrow, 1, instr);
1491
1492 if (FLAG_debug_code) {
1493 __ stop("Unreachable code.");
1494 }
1495}
1496
1497
// Emits an integer add. When the hydrogen instruction can overflow, the
// add sets the condition flags (SetCC) and we deoptimize on overflow (vs).
void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    // Right operand lives in memory: load it into the ip scratch register.
    Register right_reg = EmitLoadRegister(right, ip);
    __ add(ToRegister(result), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ add(ToRegister(result), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    // vs: overflow flag set by the add above.
    DeoptimizeIf(vs, instr->environment());
  }
}
1517
1518
// Emits a double-precision binary arithmetic operation. ADD/SUB/MUL/DIV
// map directly to VFP instructions; MOD goes through a C helper function,
// so the caller-saved core registers r0-r3 are preserved around the call.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  DoubleRegister result = ToDoubleRegister(instr->result());
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(result, left, right);
      break;
    case Token::SUB:
      __ vsub(result, left, right);
      break;
    case Token::MUL:
      __ vmul(result, left, right);
      break;
    case Token::DIV:
      __ vdiv(result, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      // 0 integer arguments, 2 double arguments.
      __ PrepareCallCFunction(0, 2, scratch0());
      __ SetCallCDoubleArguments(left, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          0, 2);
      // Move the result in the double result register.
      __ GetCFunctionDoubleResult(result);

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
1557
1558
// Emits a generic (tagged-operand) binary arithmetic operation by calling
// the BinaryOpStub. Register constraints follow the stub's calling
// convention: left in r1, right in r0, result in r0.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}
1568
1569
1570int LCodeGen::GetNextEmittedBlock(int block) {
1571 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1572 LLabel* label = chunk_->GetLabel(i);
1573 if (!label->HasReplacement()) return i;
1574 }
1575 return -1;
1576}
1577
1578
// Emits a conditional branch to |left_block| (taken when |cc| holds) and
// |right_block| (otherwise), eliding whichever branch would fall through
// to the next block that is actually emitted.
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    // Both targets coincide: an unconditional goto suffices.
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // Fall through to the left block; branch away on the negated condition.
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // Fall through to the right block; branch away when |cc| holds.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    // Neither target falls through: need both a conditional and an
    // unconditional branch.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}
1595
1596
// Emits the ToBoolean-style branch for an LBranch instruction. The fast
// paths handle untagged int32 (non-zero is true) and double (zero and NaN
// are false) representations. Tagged values dispatch on the set of input
// types observed so far (ToBooleanStub::Types); a value of a type never
// seen on this path triggers an unconditional deopt so the type feedback
// can be updated.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      // Known boolean: compare against the true value only.
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, eq);
    } else if (type.IsSmi()) {
      // Known smi: only the zero smi is false.
      __ cmp(reg, Operand(0));
      EmitBranch(true_block, false_block, ne);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
        __ b(eq, false_label);
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // Boolean -> its value.
        __ CompareRoot(reg, Heap::kTrueValueRootIndex);
        __ b(eq, true_label);
        __ CompareRoot(reg, Heap::kFalseValueRootIndex);
        __ b(eq, false_label);
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ CompareRoot(reg, Heap::kNullValueRootIndex);
        __ b(eq, false_label);
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ cmp(reg, Operand(0));
        __ b(eq, false_label);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ tst(reg, Operand(kSmiTagMask));
        DeoptimizeIf(eq, instr->environment());
      }

      const Register map = scratch0();
      if (expected.NeedsMap()) {
        __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));

        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
          __ tst(ip, Operand(1 << Map::kIsUndetectable));
          __ b(ne, false_label);
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // spec object -> true.
        __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
        __ b(ge, true_label);
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
        __ b(ge, &not_string);
        __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
        __ cmp(ip, Operand(0));
        __ b(ne, true_label);
        __ b(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        DoubleRegister dbl_scratch = double_scratch0();
        Label not_heap_number;
        __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
        __ b(ne, &not_heap_number);
        __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
        __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
        __ b(vs, false_label);  // NaN -> false.
        __ b(eq, false_label);  // +0, -0 -> false.
        __ b(true_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(al, instr->environment());
    }
  }
}
1710
1711
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001712void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001713 block = chunk_->LookupDestination(block);
1714 int next_block = GetNextEmittedBlock(current_block_);
1715 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001716 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001717 }
1718}
1719
1720
// Unconditional control transfer: delegate to EmitGoto, which elides
// jumps that would fall through.
void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());
}
1724
1725
1726Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001727 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001728 switch (op) {
1729 case Token::EQ:
1730 case Token::EQ_STRICT:
1731 cond = eq;
1732 break;
1733 case Token::LT:
1734 cond = is_unsigned ? lo : lt;
1735 break;
1736 case Token::GT:
1737 cond = is_unsigned ? hi : gt;
1738 break;
1739 case Token::LTE:
1740 cond = is_unsigned ? ls : le;
1741 break;
1742 case Token::GTE:
1743 cond = is_unsigned ? hs : ge;
1744 break;
1745 case Token::IN:
1746 case Token::INSTANCEOF:
1747 default:
1748 UNREACHABLE();
1749 }
1750 return cond;
1751}
1752
1753
// Emits an integer-or-double compare-and-branch. Two constant operands
// are folded at compile time into an unconditional goto. Doubles use a
// VFP compare (NaN jumps to the false block); integers use cmp, with the
// condition reversed when the constant is on the left so it can be used
// as the cmp immediate.
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  Condition cond = TokenToCondition(instr->op(), false);

  if (left->IsConstantOperand() && right->IsConstantOperand()) {
    // We can statically evaluate the comparison.
    double left_val = ToDouble(LConstantOperand::cast(left));
    double right_val = ToDouble(LConstantOperand::cast(right));
    int next_block =
        EvalComparison(instr->op(), left_val, right_val) ? true_block
                                                         : false_block;
    EmitGoto(next_block);
  } else {
    if (instr->is_double()) {
      // Compare left and right operands as doubles and load the
      // resulting flags into the normal status register.
      __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
      // If a NaN is involved, i.e. the result is unordered (V set),
      // jump to false block label.
      __ b(vs, chunk_->GetAssemblyLabel(false_block));
    } else {
      if (right->IsConstantOperand()) {
        __ cmp(ToRegister(left),
               Operand(ToInteger32(LConstantOperand::cast(right))));
      } else if (left->IsConstantOperand()) {
        __ cmp(ToRegister(right),
               Operand(ToInteger32(LConstantOperand::cast(left))));
        // We transposed the operands. Reverse the condition.
        cond = ReverseCondition(cond);
      } else {
        __ cmp(ToRegister(left), ToRegister(right));
      }
    }
    EmitBranch(true_block, false_block, cond);
  }
}
1793
1794
// Branches on pointer identity of two tagged values (strict reference
// equality, no value comparison).
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}
1804
1805
// Branches on equality of a tagged value with a compile-time constant
// taken from the hydrogen instruction.
void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmp(left, Operand(instr->hydrogen()->right()));
  EmitBranch(true_block, false_block, eq);
}
1814
1815
// Branches on whether a value is null/undefined. Strict equality compares
// against exactly one of the two nil values; non-strict (==) additionally
// accepts the other nil value and undetectable objects, per the abstract
// equality semantics for null/undefined.
void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  // If the expression is known to be untagged or a smi, then it's definitely
  // not null, and it can't be an undetectable object.
  if (instr->hydrogen()->representation().IsSpecialization() ||
      instr->hydrogen()->type().IsSmi()) {
    EmitGoto(false_block);
    return;
  }

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  Heap::RootListIndex nil_value = instr->nil() == kNullValue ?
      Heap::kNullValueRootIndex :
      Heap::kUndefinedValueRootIndex;
  __ LoadRoot(ip, nil_value);
  __ cmp(reg, ip);
  if (instr->kind() == kStrictEquality) {
    EmitBranch(true_block, false_block, eq);
  } else {
    // Non-strict: the other of the two nil values also compares true.
    Heap::RootListIndex other_nil_value = instr->nil() == kNullValue ?
        Heap::kUndefinedValueRootIndex :
        Heap::kNullValueRootIndex;
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, other_nil_value);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ JumpIfSmi(reg, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}
1856
1857
// Emits the "is object" test used by typeof-style checks. Jumps to
// |is_not_object| or |is_object| for the cases decided early (smi, null,
// undetectable, type below the object range); otherwise falls through
// with the flags set so that the returned condition (le) means "object".
// Clobbers |temp1| and scratch0(); |input| is preserved.
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Label* is_not_object,
                                 Label* is_object) {
  Register temp2 = scratch0();
  __ JumpIfSmi(input, is_not_object);

  // null counts as an object here.
  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
  __ cmp(input, temp2);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  return le;
}
1883
1884
// Branches on whether the input is an object, delegating the actual test
// to EmitIsObject and branching on the condition it leaves in the flags.
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}
1899
1900
// Emits the "is string" test: jumps to |is_not_string| for smis, otherwise
// compares the instance type against FIRST_NONSTRING_TYPE and returns the
// condition (lt) under which the input is a string. Clobbers |temp1|.
Condition LCodeGen::EmitIsString(Register input,
                                 Register temp1,
                                 Label* is_not_string) {
  __ JumpIfSmi(input, is_not_string);
  __ CompareObjectType(input, temp1, temp1, FIRST_NONSTRING_TYPE);

  return lt;
}
1909
1910
// Branches on whether the input is a string, using EmitIsString for the
// type check.
void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsString(reg, temp1, false_label);

  EmitBranch(true_block, false_block, true_cond);
}
1924
1925
// Branches on whether the input is a smi: the smi tag bits are tested
// directly, eq meaning the tag is zero (i.e. a smi).
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}
1934
1935
// Branches on whether the input is an undetectable object, by testing the
// kIsUndetectable bit in the object's map. Smis are never undetectable.
void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
  __ tst(temp, Operand(1 << Map::kIsUndetectable));
  EmitBranch(true_block, false_block, ne);
}
1949
1950
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001951static Condition ComputeCompareCondition(Token::Value op) {
1952 switch (op) {
1953 case Token::EQ_STRICT:
1954 case Token::EQ:
1955 return eq;
1956 case Token::LT:
1957 return lt;
1958 case Token::GT:
1959 return gt;
1960 case Token::LTE:
1961 return le;
1962 case Token::GTE:
1963 return ge;
1964 default:
1965 UNREACHABLE();
1966 return kNoCondition;
1967 }
1968}
1969
1970
// Branches on the result of a string comparison performed by the generic
// compare IC. The IC leaves its result in r0; the cmp against zero sets
// the flags for the branch (and also signals that no smi code was inlined).
void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);

  EmitBranch(true_block, false_block, condition);
}
1984
1985
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001986static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001987 InstanceType from = instr->from();
1988 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001989 if (from == FIRST_TYPE) return to;
1990 ASSERT(from == to || to == LAST_TYPE);
1991 return from;
1992}
1993
1994
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001995static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001996 InstanceType from = instr->from();
1997 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001998 if (from == to) return eq;
1999 if (to == LAST_TYPE) return hs;
2000 if (from == FIRST_TYPE) return ls;
2001 UNREACHABLE();
2002 return eq;
2003}
2004
2005
// Branches on whether the input's instance type lies in the interval
// described by the hydrogen instruction. Smis have no instance type and
// go straight to the false block.
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ JumpIfSmi(input, false_label);

  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}
2020
2021
// Extracts the cached array index from a string's hash field into the
// result register. The input must be a string (asserted in debug builds);
// callers are expected to have verified the index is present.
void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}
2033
2034
// Branches on whether a string's hash field contains a cached array index
// (eq: the "contains index" mask bits are all clear).
void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq);
}
2048
2049
// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input.
//
// Tests whether |input|'s class (its constructor's instance class name)
// equals |class_name|, ending with eq/ne in the flags. "Function" and
// "Object" get special handling to match the spec'd [[Class]] behavior.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String>class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!input.is(temp2));
  ASSERT(!temp.is(temp2));

  __ JumpIfSmi(input, is_false);

  if (class_name->IsEqualTo(CStrVector("Function"))) {
    // Assuming the following assertions, we can use the same compares to test
    // for both being a function type and being in the object type range.
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  FIRST_SPEC_OBJECT_TYPE + 1);
    STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  LAST_SPEC_OBJECT_TYPE - 1);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(input, temp, temp2, FIRST_SPEC_OBJECT_TYPE);
    __ b(lt, is_false);
    __ b(eq, is_true);
    __ cmp(temp2, Operand(LAST_SPEC_OBJECT_TYPE));
    __ b(eq, is_true);
  } else {
    // Faster code path to avoid two compares: subtract lower bound from the
    // actual type and do a signed compare with the width of the type range.
    __ ldr(temp, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(temp2, FieldMemOperand(temp, Map::kInstanceTypeOffset));
    __ sub(temp2, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ cmp(temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
                          FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ b(gt, is_false);
  }

  // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}
2115
2116
// Branches on whether the input's class name matches the literal class
// name from the hydrogen instruction. EmitClassOfTest leaves the answer
// in the flags (eq when the class matches).
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq);
}
2133
2134
// Branches on whether the object's map equals the expected map from the
// instruction. Note this uses the raw block ids, not LookupDestination.
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}
2145
2146
// Implements the generic instanceof operator via InstanceofStub. The stub
// returns zero in r0 on a positive result; the conditional moves then
// materialize the true/false boolean objects into r0.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
2158
2159
// Implements instanceof against a known global function, with an inlined
// call-site cache: the map and result embedded here as hole values get
// patched by the instanceof stub after the first slow-path run, so
// subsequent checks with the same map hit the fast path. Cache misses for
// non-null, non-string objects fall through to the deferred slow path.
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
    }
    virtual LInstruction* instr() { return instr_; }
    Label* map_check() { return &map_check_; }
   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A Smi is not instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  Handle<JSGlobalPropertyCell> cell =
      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
  __ mov(ip, Operand(Handle<Object>(cell)));
  __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(factory()->the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values are not instances of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}
2235
2236
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002237void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2238 Label* map_check) {
Steve Block1e0659c2011-05-24 12:43:12 +01002239 Register result = ToRegister(instr->result());
2240 ASSERT(result.is(r0));
2241
2242 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2243 flags = static_cast<InstanceofStub::Flags>(
2244 flags | InstanceofStub::kArgsInRegisters);
2245 flags = static_cast<InstanceofStub::Flags>(
2246 flags | InstanceofStub::kCallSiteInlineCheck);
2247 flags = static_cast<InstanceofStub::Flags>(
2248 flags | InstanceofStub::kReturnTrueFalseObject);
2249 InstanceofStub stub(flags);
2250
Ben Murdoch8b112d22011-06-08 16:22:53 +01002251 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01002252
2253 // Get the temp register reserved by the instruction. This needs to be r4 as
2254 // its slot of the pushing of safepoint registers is used to communicate the
2255 // offset to the location of the map check.
2256 Register temp = ToRegister(instr->TempAt(0));
2257 ASSERT(temp.is(r4));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002258 __ LoadHeapObject(InstanceofStub::right(), instr->function());
Steve Block1e0659c2011-05-24 12:43:12 +01002259 static const int kAdditionalDelta = 4;
2260 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2261 Label before_push_delta;
2262 __ bind(&before_push_delta);
2263 __ BlockConstPoolFor(kAdditionalDelta);
2264 __ mov(temp, Operand(delta * kPointerSize));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002265 __ StoreToSafepointRegisterSlot(temp, temp);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002266 CallCodeGeneric(stub.GetCode(),
2267 RelocInfo::CODE_TARGET,
2268 instr,
2269 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002270 ASSERT(instr->HasDeoptimizationEnvironment());
2271 LEnvironment* env = instr->deoptimization_environment();
2272 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
Steve Block1e0659c2011-05-24 12:43:12 +01002273 // Put the result value into the result register slot and
2274 // restore all registers.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002275 __ StoreToSafepointRegisterSlot(result, result);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002276}
2277
Ben Murdochb0fe1622011-05-05 13:52:32 +01002278
Ben Murdochb0fe1622011-05-05 13:52:32 +01002279void LCodeGen::DoCmpT(LCmpT* instr) {
2280 Token::Value op = instr->op();
2281
2282 Handle<Code> ic = CompareIC::GetUninitialized(op);
2283 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Steve Block1e0659c2011-05-24 12:43:12 +01002284 __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002285
2286 Condition condition = ComputeCompareCondition(op);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002287 __ LoadRoot(ToRegister(instr->result()),
2288 Heap::kTrueValueRootIndex,
2289 condition);
2290 __ LoadRoot(ToRegister(instr->result()),
2291 Heap::kFalseValueRootIndex,
2292 NegateCondition(condition));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002293}
2294
2295
Ben Murdochb0fe1622011-05-05 13:52:32 +01002296void LCodeGen::DoReturn(LReturn* instr) {
2297 if (FLAG_trace) {
2298 // Push the return value on the stack as the parameter.
2299 // Runtime::TraceExit returns its parameter in r0.
2300 __ push(r0);
2301 __ CallRuntime(Runtime::kTraceExit, 1);
2302 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002303 int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002304 __ mov(sp, fp);
2305 __ ldm(ia_w, sp, fp.bit() | lr.bit());
2306 __ add(sp, sp, Operand(sp_delta));
2307 __ Jump(lr);
2308}
2309
2310
Ben Murdoch8b112d22011-06-08 16:22:53 +01002311void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002312 Register result = ToRegister(instr->result());
2313 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
2314 __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002315 if (instr->hydrogen()->RequiresHoleCheck()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002316 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2317 __ cmp(result, ip);
2318 DeoptimizeIf(eq, instr->environment());
2319 }
2320}
2321
2322
Ben Murdoch8b112d22011-06-08 16:22:53 +01002323void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2324 ASSERT(ToRegister(instr->global_object()).is(r0));
2325 ASSERT(ToRegister(instr->result()).is(r0));
2326
2327 __ mov(r2, Operand(instr->name()));
2328 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2329 : RelocInfo::CODE_TARGET_CONTEXT;
2330 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2331 CallCode(ic, mode, instr);
2332}
2333
2334
// Stores a value into a global property cell, deoptimizing first if the cell
// currently holds the hole (i.e. the property was deleted).
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->value());
  Register cell = scratch0();

  // Load the cell.
  __ mov(cell, Operand(instr->hydrogen()->cell()));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    // We use a temp to check the payload (CompareRoot might clobber ip).
    Register payload = ToRegister(instr->TempAt(0));
    __ ldr(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
    __ CompareRoot(payload, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
  // Cells are always rescanned, so no write barrier here.
}
2358
2359
Ben Murdoch8b112d22011-06-08 16:22:53 +01002360void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2361 ASSERT(ToRegister(instr->global_object()).is(r1));
2362 ASSERT(ToRegister(instr->value()).is(r0));
2363
2364 __ mov(r2, Operand(instr->name()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002365 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
Ben Murdoch8b112d22011-06-08 16:22:53 +01002366 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2367 : isolate()->builtins()->StoreIC_Initialize();
2368 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2369}
2370
2371
Ben Murdochb8e0da22011-05-16 14:20:40 +01002372void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002373 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002374 Register result = ToRegister(instr->result());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002375 __ ldr(result, ContextOperand(context, instr->slot_index()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002376 if (instr->hydrogen()->RequiresHoleCheck()) {
2377 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2378 __ cmp(result, ip);
2379 if (instr->hydrogen()->DeoptimizesOnHole()) {
2380 DeoptimizeIf(eq, instr->environment());
2381 } else {
2382 __ mov(result, Operand(factory()->undefined_value()), LeaveCC, eq);
2383 }
2384 }
Ben Murdochb8e0da22011-05-16 14:20:40 +01002385}
2386
2387
Steve Block1e0659c2011-05-24 12:43:12 +01002388void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2389 Register context = ToRegister(instr->context());
2390 Register value = ToRegister(instr->value());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002391 Register scratch = scratch0();
2392 MemOperand target = ContextOperand(context, instr->slot_index());
2393
2394 Label skip_assignment;
2395
2396 if (instr->hydrogen()->RequiresHoleCheck()) {
2397 __ ldr(scratch, target);
2398 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2399 __ cmp(scratch, ip);
2400 if (instr->hydrogen()->DeoptimizesOnHole()) {
2401 DeoptimizeIf(eq, instr->environment());
2402 } else {
2403 __ b(ne, &skip_assignment);
2404 }
Ben Murdochc7cc0282012-03-05 14:35:55 +00002405 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002406
2407 __ str(value, target);
2408 if (instr->hydrogen()->NeedsWriteBarrier()) {
2409 HType type = instr->hydrogen()->value()->type();
2410 SmiCheck check_needed =
2411 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
2412 __ RecordWriteContextSlot(context,
2413 target.offset(),
2414 value,
2415 scratch,
2416 kLRHasBeenSaved,
2417 kSaveFPRegs,
2418 EMIT_REMEMBERED_SET,
2419 check_needed);
2420 }
2421
2422 __ bind(&skip_assignment);
Steve Block1e0659c2011-05-24 12:43:12 +01002423}
2424
2425
Ben Murdochb0fe1622011-05-05 13:52:32 +01002426void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002427 Register object = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01002428 Register result = ToRegister(instr->result());
2429 if (instr->hydrogen()->is_in_object()) {
2430 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
2431 } else {
2432 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2433 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
2434 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002435}
2436
2437
Ben Murdoch257744e2011-11-30 15:57:28 +00002438void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2439 Register object,
2440 Handle<Map> type,
2441 Handle<String> name) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002442 LookupResult lookup(isolate());
Steve Block44f0eee2011-05-26 01:26:41 +01002443 type->LookupInDescriptors(NULL, *name, &lookup);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002444 ASSERT(lookup.IsFound() &&
Ben Murdoch257744e2011-11-30 15:57:28 +00002445 (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
2446 if (lookup.type() == FIELD) {
2447 int index = lookup.GetLocalFieldIndexFromMap(*type);
2448 int offset = index * kPointerSize;
2449 if (index < 0) {
2450 // Negative property indices are in-object properties, indexed
2451 // from the end of the fixed part of the object.
2452 __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
2453 } else {
2454 // Non-negative property indices are in the properties array.
2455 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2456 __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
2457 }
Steve Block44f0eee2011-05-26 01:26:41 +01002458 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002459 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002460 __ LoadHeapObject(result, function);
Steve Block44f0eee2011-05-26 01:26:41 +01002461 }
2462}
2463
2464
// Polymorphic named load: compares the receiver's map against each expected
// map and emits a fast load per map. Falls back to the generic LoadIC (if
// allowed) or deoptimizes when no map matches.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps collected: only the generic path is possible.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    // Dispatch on all maps but the last with explicit compare-and-branch.
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    // The last map either falls through to the generic IC or deoptimizes.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
2506
2507
Ben Murdochb0fe1622011-05-05 13:52:32 +01002508void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2509 ASSERT(ToRegister(instr->object()).is(r0));
2510 ASSERT(ToRegister(instr->result()).is(r0));
2511
2512 // Name is always in r2.
2513 __ mov(r2, Operand(instr->name()));
Steve Block44f0eee2011-05-26 01:26:41 +01002514 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002515 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2516}
2517
2518
Steve Block9fac8402011-05-12 15:51:54 +01002519void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2520 Register scratch = scratch0();
2521 Register function = ToRegister(instr->function());
2522 Register result = ToRegister(instr->result());
2523
2524 // Check that the function really is a function. Load map into the
2525 // result register.
2526 __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
2527 DeoptimizeIf(ne, instr->environment());
2528
2529 // Make sure that the function has an instance prototype.
2530 Label non_instance;
2531 __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
2532 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
2533 __ b(ne, &non_instance);
2534
2535 // Get the prototype or initial map from the function.
2536 __ ldr(result,
2537 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2538
2539 // Check that the function has a prototype or an initial map.
2540 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2541 __ cmp(result, ip);
2542 DeoptimizeIf(eq, instr->environment());
2543
2544 // If the function does not have an initial map, we're done.
2545 Label done;
2546 __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
2547 __ b(ne, &done);
2548
2549 // Get the prototype from the initial map.
2550 __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
2551 __ jmp(&done);
2552
2553 // Non-instance prototype: Fetch prototype from constructor field
2554 // in initial map.
2555 __ bind(&non_instance);
2556 __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
2557
2558 // All done.
2559 __ bind(&done);
2560}
2561
2562
Ben Murdochb0fe1622011-05-05 13:52:32 +01002563void LCodeGen::DoLoadElements(LLoadElements* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002564 Register result = ToRegister(instr->result());
2565 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002566 Register scratch = scratch0();
2567
Steve Block1e0659c2011-05-24 12:43:12 +01002568 __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002569 if (FLAG_debug_code) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002570 Label done, fail;
Steve Block1e0659c2011-05-24 12:43:12 +01002571 __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002572 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
2573 __ cmp(scratch, ip);
2574 __ b(eq, &done);
2575 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
2576 __ cmp(scratch, ip);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002577 __ b(eq, &done);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002578 // |scratch| still contains |input|'s map.
2579 __ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
2580 __ ubfx(scratch, scratch, Map::kElementsKindShift,
2581 Map::kElementsKindBitCount);
Ben Murdoch589d6972011-11-30 16:04:58 +00002582 __ cmp(scratch, Operand(FAST_ELEMENTS));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002583 __ b(eq, &done);
Ben Murdoch589d6972011-11-30 16:04:58 +00002584 __ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002585 __ b(lt, &fail);
Ben Murdoch589d6972011-11-30 16:04:58 +00002586 __ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002587 __ b(le, &done);
2588 __ bind(&fail);
2589 __ Abort("Check for fast or external elements failed.");
Ben Murdoch086aeea2011-05-13 15:57:08 +01002590 __ bind(&done);
2591 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002592}
2593
2594
Steve Block44f0eee2011-05-26 01:26:41 +01002595void LCodeGen::DoLoadExternalArrayPointer(
2596 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002597 Register to_reg = ToRegister(instr->result());
2598 Register from_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002599 __ ldr(to_reg, FieldMemOperand(from_reg,
2600 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002601}
2602
2603
Ben Murdochb0fe1622011-05-05 13:52:32 +01002604void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002605 Register arguments = ToRegister(instr->arguments());
2606 Register length = ToRegister(instr->length());
2607 Register index = ToRegister(instr->index());
2608 Register result = ToRegister(instr->result());
2609
2610 // Bailout index is not a valid argument index. Use unsigned check to get
2611 // negative check for free.
2612 __ sub(length, length, index, SetCC);
2613 DeoptimizeIf(ls, instr->environment());
2614
2615 // There are two words between the frame pointer and the last argument.
2616 // Subtracting from length accounts for one of them add one more.
2617 __ add(length, length, Operand(1));
2618 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002619}
2620
2621
// Loads an element from a fast-elements FixedArray, optionally deoptimizing
// when the loaded value is the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    __ cmp(result, scratch);
    DeoptimizeIf(eq, instr->environment());
  }
}
2639
2640
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002641void LCodeGen::DoLoadKeyedFastDoubleElement(
2642 LLoadKeyedFastDoubleElement* instr) {
2643 Register elements = ToRegister(instr->elements());
2644 bool key_is_constant = instr->key()->IsConstantOperand();
2645 Register key = no_reg;
2646 DwVfpRegister result = ToDoubleRegister(instr->result());
2647 Register scratch = scratch0();
2648
2649 int shift_size =
Ben Murdoch589d6972011-11-30 16:04:58 +00002650 ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002651 int constant_key = 0;
2652 if (key_is_constant) {
2653 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
2654 if (constant_key & 0xF0000000) {
2655 Abort("array index constant value too big.");
2656 }
2657 } else {
2658 key = ToRegister(instr->key());
2659 }
2660
2661 Operand operand = key_is_constant
2662 ? Operand(constant_key * (1 << shift_size) +
2663 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
2664 : Operand(key, LSL, shift_size);
2665 __ add(elements, elements, operand);
2666 if (!key_is_constant) {
2667 __ add(elements, elements,
2668 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
2669 }
2670
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002671 __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
2672 __ cmp(scratch, Operand(kHoleNanUpper32));
2673 DeoptimizeIf(eq, instr->environment());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002674
2675 __ vldr(result, elements, 0);
2676}
2677
2678
Steve Block44f0eee2011-05-26 01:26:41 +01002679void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2680 LLoadKeyedSpecializedArrayElement* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01002681 Register external_pointer = ToRegister(instr->external_pointer());
Ben Murdoch257744e2011-11-30 15:57:28 +00002682 Register key = no_reg;
Ben Murdoch589d6972011-11-30 16:04:58 +00002683 ElementsKind elements_kind = instr->elements_kind();
Ben Murdoch257744e2011-11-30 15:57:28 +00002684 bool key_is_constant = instr->key()->IsConstantOperand();
2685 int constant_key = 0;
2686 if (key_is_constant) {
2687 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
2688 if (constant_key & 0xF0000000) {
2689 Abort("array index constant value too big.");
2690 }
2691 } else {
2692 key = ToRegister(instr->key());
2693 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002694 int shift_size = ElementsKindToShiftSize(elements_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00002695
Ben Murdoch589d6972011-11-30 16:04:58 +00002696 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
2697 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002698 CpuFeatures::Scope scope(VFP3);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002699 DwVfpRegister result = ToDoubleRegister(instr->result());
2700 Operand operand = key_is_constant
2701 ? Operand(constant_key * (1 << shift_size))
2702 : Operand(key, LSL, shift_size);
Ben Murdoch257744e2011-11-30 15:57:28 +00002703 __ add(scratch0(), external_pointer, operand);
Ben Murdoch589d6972011-11-30 16:04:58 +00002704 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002705 __ vldr(result.low(), scratch0(), 0);
2706 __ vcvt_f64_f32(result, result.low());
Ben Murdoch589d6972011-11-30 16:04:58 +00002707 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
Ben Murdoch257744e2011-11-30 15:57:28 +00002708 __ vldr(result, scratch0(), 0);
2709 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01002710 } else {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002711 Register result = ToRegister(instr->result());
Ben Murdoch257744e2011-11-30 15:57:28 +00002712 MemOperand mem_operand(key_is_constant
2713 ? MemOperand(external_pointer, constant_key * (1 << shift_size))
2714 : MemOperand(external_pointer, key, LSL, shift_size));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002715 switch (elements_kind) {
Ben Murdoch589d6972011-11-30 16:04:58 +00002716 case EXTERNAL_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002717 __ ldrsb(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002718 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002719 case EXTERNAL_PIXEL_ELEMENTS:
2720 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002721 __ ldrb(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002722 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002723 case EXTERNAL_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002724 __ ldrsh(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002725 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002726 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002727 __ ldrh(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002728 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002729 case EXTERNAL_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002730 __ ldr(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002731 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002732 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002733 __ ldr(result, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002734 __ cmp(result, Operand(0x80000000));
2735 // TODO(danno): we could be more clever here, perhaps having a special
2736 // version of the stub that detects if the overflow case actually
2737 // happens, and generate code that returns a double rather than int.
2738 DeoptimizeIf(cs, instr->environment());
2739 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002740 case EXTERNAL_FLOAT_ELEMENTS:
2741 case EXTERNAL_DOUBLE_ELEMENTS:
2742 case FAST_DOUBLE_ELEMENTS:
2743 case FAST_ELEMENTS:
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002744 case FAST_SMI_ONLY_ELEMENTS:
Ben Murdoch589d6972011-11-30 16:04:58 +00002745 case DICTIONARY_ELEMENTS:
2746 case NON_STRICT_ARGUMENTS_ELEMENTS:
Ben Murdoch8b112d22011-06-08 16:22:53 +01002747 UNREACHABLE();
2748 break;
2749 }
2750 }
Steve Block1e0659c2011-05-24 12:43:12 +01002751}
2752
2753
Ben Murdochb0fe1622011-05-05 13:52:32 +01002754void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2755 ASSERT(ToRegister(instr->object()).is(r1));
2756 ASSERT(ToRegister(instr->key()).is(r0));
2757
Steve Block44f0eee2011-05-26 01:26:41 +01002758 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002759 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2760}
2761
2762
// Computes the frame pointer of the frame holding the actual arguments:
// the current frame, or the frame below the arguments adaptor frame when
// the function was called with a mismatched argument count.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  Label done, adapted;
  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  // Predicated moves: exactly one executes depending on the comparison.
  __ mov(result, fp, LeaveCC, ne);
  __ mov(result, scratch, LeaveCC, eq);
}
2778
2779
// Computes the number of actual arguments: the static parameter count when
// there is no adaptor frame, otherwise the length stored in the adaptor
// frame (untagged from its smi encoding).
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}
2800
2801
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002802void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002803 Register receiver = ToRegister(instr->receiver());
2804 Register function = ToRegister(instr->function());
Steve Block1e0659c2011-05-24 12:43:12 +01002805 Register scratch = scratch0();
Ben Murdochb8e0da22011-05-16 14:20:40 +01002806
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002807 // If the receiver is null or undefined, we have to pass the global
2808 // object as a receiver to normal functions. Values have to be
2809 // passed unchanged to builtins and strict-mode functions.
Steve Block1e0659c2011-05-24 12:43:12 +01002810 Label global_object, receiver_ok;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002811
2812 // Do not transform the receiver to object for strict mode
2813 // functions.
2814 __ ldr(scratch,
2815 FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
2816 __ ldr(scratch,
2817 FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
2818 __ tst(scratch,
2819 Operand(1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize)));
2820 __ b(ne, &receiver_ok);
2821
2822 // Do not transform the receiver to object for builtins.
2823 __ tst(scratch, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
2824 __ b(ne, &receiver_ok);
2825
2826 // Normal function. Replace undefined or null with global receiver.
Steve Block1e0659c2011-05-24 12:43:12 +01002827 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
2828 __ cmp(receiver, scratch);
2829 __ b(eq, &global_object);
2830 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
2831 __ cmp(receiver, scratch);
2832 __ b(eq, &global_object);
2833
2834 // Deoptimize if the receiver is not a JS object.
2835 __ tst(receiver, Operand(kSmiTagMask));
2836 DeoptimizeIf(eq, instr->environment());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002837 __ CompareObjectType(receiver, scratch, scratch, FIRST_SPEC_OBJECT_TYPE);
2838 DeoptimizeIf(lt, instr->environment());
Steve Block1e0659c2011-05-24 12:43:12 +01002839 __ jmp(&receiver_ok);
2840
2841 __ bind(&global_object);
2842 __ ldr(receiver, GlobalObjectOperand());
Ben Murdoch257744e2011-11-30 15:57:28 +00002843 __ ldr(receiver,
2844 FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002845 __ bind(&receiver_ok);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002846}
2847
2848
// Implements the apply-arguments call: copies up to kArgumentsLimit
// arguments from the arguments object's elements onto the stack (in reverse
// index order, highest first) and invokes the function. Deoptimizes when the
// argument count exceeds the limit.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  // length doubles as the loop index, counting down to zero.
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2897
2898
// Pushes a single (tagged/integer) argument onto the stack for an upcoming
// call. Double-typed arguments are not supported and abort code generation.
void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort("DoPushArgument not implemented for double type.");
  } else {
    // ip serves as the scratch register if the operand is not already in one.
    Register argument_reg = EmitLoadRegister(argument, ip);
    __ push(argument_reg);
  }
}
2908
2909
// Materializes the current function's closure (a compile-time constant of
// this code object) into the result register.
void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ LoadHeapObject(result, instr->hydrogen()->closure());
}
2914
2915
// Copies the current context (held in cp) into the result register.
void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, cp);
}
2920
2921
// Loads the enclosing (previous) context from the given context's
// PREVIOUS_INDEX slot into the result register.
void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result,
         MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
}
2928
2929
// Declares the script's globals by calling Runtime::kDeclareGlobals with
// three arguments: the context, the name/value pairs array, and the
// declaration flags encoded as a smi.
void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  __ push(cp);  // The context is the first argument.
  __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
  __ push(scratch0());
  __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
  __ push(scratch0());
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}
2938
2939
// Loads the global object out of the current context into result.
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}
2944
2945
// Loads the global receiver (the "this" used for non-strict global calls)
// from the given global object into result.
void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}
2951
2952
// Emits a call to a statically-known JSFunction. When no arguments adaption
// is required (or the arity matches the formal parameter count), the call
// jumps straight at the function's code entry; otherwise it goes through the
// full InvokeFunction path. r0/r1/r5 usage is dictated by the ARM calling
// convention for JSFunction invocations.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
      function->shared()->formal_parameter_count() == arity;

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  if (can_invoke_directly) {
    // r1 must hold the function object for the direct call.
    __ LoadHeapObject(r1, function);
    // Change context if needed.
    bool change_context =
        (info()->closure()->context() != function->context()) ||
        scope()->contains_with() ||
        (scope()->num_heap_slots() > 0);
    if (change_context) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    }

    // Set r0 to arguments count if adaption is not needed. Assumes that r0
    // is available to write to at this point.
    if (!function->NeedsArgumentsAdaption()) {
      __ mov(r0, Operand(arity));
    }

    // Invoke function.
    __ SetCallKind(r5, call_kind);
    __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ Call(ip);

    // Set up deoptimization.
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
  } else {
    // Mismatched arity: let InvokeFunction go through the arguments adaptor.
    SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);
    __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
  }

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2996
2997
// Calls a compile-time-known function as a method; the result lands in r0.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  CallKnownFunction(instr->function(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD);
}
3005
3006
// Deferred (slow) path of Math.abs for a tagged, non-smi input. Deoptimizes
// unless the input is a heap number; positive numbers are returned as-is,
// while negative ones get a freshly allocated heap number with the IEEE 754
// sign bit cleared (allocation falls back to the runtime if needed).
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  // Reuse scratch0() under a new name; from here on it holds the upper
  // (sign + exponent) word of the heap number.
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  // Move the input to the result if necessary.
  __ Move(result, input);
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    // Clear the sign bit and copy exponent + mantissa into the new number.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    __ StoreToSafepointRegisterSlot(tmp1, result);
  }

  __ bind(&done);
}
3071
3072
// Emits abs() for an untagged 32-bit integer: result = input if input >= 0,
// otherwise 0 - input. Deoptimizes on overflow (abs(kMinInt)).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ cmp(input, Operand(0));
  __ Move(result, input, pl);
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(result, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
3085
3086
// Emits Math.abs, dispatching on the value's representation: vabs for
// doubles, inline integer abs for int32, and for tagged values a fast smi
// path with a deferred heap-number slow path.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
    virtual LInstruction* instr() { return instr_; }
   private:
    LUnaryMathOperation* instr_;
  };

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    DwVfpRegister result = ToDoubleRegister(instr->result());
    __ vabs(result, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
3121
3122
// Emits Math.floor on a double input, producing an int32 result via a
// round-toward-minus-infinity VFP truncation. Deoptimizes if the value is
// not exactly representable, and (when the hydrogen value requires it)
// on a -0 input.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  __ EmitVFPTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     scratch2);
  // EmitVFPTruncate leaves ne set if the conversion was inexact.
  DeoptimizeIf(ne, instr->environment());

  // Move the result back to the general purpose result register.
  __ vmov(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    // Result is zero: inspect the input's sign bit to distinguish -0.
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}
3151
3152
// Emits Math.round on a double input, producing an int32 result. Rounding is
// implemented as floor(input + 0.5). Deoptimizes for inputs outside
// ]-2^32, 2^32[, for inexact truncations, and (when required) when the
// result would be -0.
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  Label done, check_sign_on_zero;

  // Extract exponent bits.
  __ vmov(result, input.high());
  __ ubfx(scratch,
          result,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);

  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
  // (Biased exponent <= bias - 2 means |input| < 0.5.)
  __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2));
  __ mov(result, Operand(0), LeaveCC, le);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ b(le, &check_sign_on_zero);
  } else {
    __ b(le, &done);
  }

  // The following conversion will not work with numbers
  // outside of ]-2^32, 2^32[.
  __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
  DeoptimizeIf(ge, instr->environment());

  // Save the original sign for later comparison.
  __ and_(scratch, result, Operand(HeapNumber::kSignMask));

  // Compute input + 0.5 so that the subsequent floor implements rounding.
  __ Vmov(double_scratch0(), 0.5);
  __ vadd(double_scratch0(), input, double_scratch0());

  // Check sign of the result: if the sign changed, the input
  // value was in ]-0.5, 0[ and the result should be -0.
  __ vmov(result, double_scratch0().high());
  __ eor(result, result, Operand(scratch), SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(mi, instr->environment());
  } else {
    __ mov(result, Operand(0), LeaveCC, mi);
    __ b(mi, &done);
  }

  // Truncate input + 0.5 toward minus infinity; ne after the truncate means
  // the conversion was inexact.
  __ EmitVFPTruncate(kRoundToMinusInf,
                     double_scratch0().low(),
                     double_scratch0(),
                     result,
                     scratch);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ bind(&check_sign_on_zero);
    // Zero result: deoptimize if the original input was negative (-0 case).
    __ vmov(scratch, input.high());
    __ tst(scratch, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ bind(&done);
}
3216
3217
// Emits Math.sqrt on a double input as a single VFP vsqrt instruction.
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister result = ToDoubleRegister(instr->result());
  __ vsqrt(result, input);
}
3223
3224
// Emits Math.pow(x, 0.5). Unlike plain sqrt, pow(-Infinity, 0.5) must be
// +Infinity, so -Infinity is special-cased before the vsqrt.
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister result = ToDoubleRegister(instr->result());
  DoubleRegister temp = ToDoubleRegister(instr->TempAt(0));

  // Note that according to ECMA-262 15.8.2.13:
  // Math.pow(-Infinity, 0.5) == Infinity
  // Math.sqrt(-Infinity) == NaN
  Label done;
  __ vmov(temp, -V8_INFINITY);
  __ VFPCompareAndSetFlags(input, temp);
  // input == -Infinity: result = -(-Infinity) = +Infinity, skip the sqrt.
  __ vneg(result, temp, eq);
  __ b(&done, eq);

  // Add +0 to convert -0 to +0.
  __ vadd(result, input, kDoubleRegZero);
  __ vsqrt(result, result);
  __ bind(&done);
}
3244
3245
// Emits Math.pow by calling the MathPowStub variant matching the exponent's
// representation (tagged, int32, or double). For a tagged exponent the code
// first deoptimizes unless it is a smi or heap number.
void LCodeGen::DoPower(LPower* instr) {
  Representation exponent_type = instr->hydrogen()->right()->representation();
  // Having marked this as a call, we can use any registers.
  // Just make sure that the input/output registers are the expected ones.
  ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
         ToDoubleRegister(instr->InputAt(1)).is(d2));
  ASSERT(!instr->InputAt(1)->IsRegister() ||
         ToRegister(instr->InputAt(1)).is(r2));
  ASSERT(ToDoubleRegister(instr->InputAt(0)).is(d1));
  ASSERT(ToDoubleRegister(instr->result()).is(d3));

  if (exponent_type.IsTagged()) {
    Label no_deopt;
    __ JumpIfSmi(r2, &no_deopt);
    // Non-smi tagged exponent must be a heap number, else deoptimize.
    __ ldr(r7, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r7, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&no_deopt);
    MathPowStub stub(MathPowStub::TAGGED);
    __ CallStub(&stub);
  } else if (exponent_type.IsInteger32()) {
    MathPowStub stub(MathPowStub::INTEGER);
    __ CallStub(&stub);
  } else {
    ASSERT(exponent_type.IsDouble());
    MathPowStub stub(MathPowStub::DOUBLE);
    __ CallStub(&stub);
  }
}
3276
3277
// Emits Math.random: updates the two 32-bit seed words stored in the global
// context (a multiply-with-carry style generator), combines them into 32
// random bits, and converts those bits to a double in [0, 1) using the
// 0x41300000 exponent trick. Falls back to a deferred C call if the seed is
// uninitialized (state[0] == 0).
void LCodeGen::DoRandom(LRandom* instr) {
  class DeferredDoRandom: public LDeferredCode {
   public:
    DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LRandom* instr_;
  };

  DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);

  // Having marked this instruction as a call we can use any
  // registers.
  ASSERT(ToDoubleRegister(instr->result()).is(d7));
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));

  static const int kSeedSize = sizeof(uint32_t);
  STATIC_ASSERT(kPointerSize == kSeedSize);

  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
  static const int kRandomSeedOffset =
      FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
  __ ldr(r2, FieldMemOperand(r0, kRandomSeedOffset));
  // r2: FixedArray of the global context's random seeds

  // Load state[0].
  __ ldr(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));
  __ cmp(r1, Operand(0));
  // state[0] == 0 means the seed has not been initialized yet; take the
  // deferred path, which calls into C to produce the random bits.
  __ b(eq, deferred->entry());
  // Load state[1].
  __ ldr(r0, FieldMemOperand(r2, ByteArray::kHeaderSize + kSeedSize));
  // r1: state[0].
  // r0: state[1].

  // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
  __ and_(r3, r1, Operand(0xFFFF));
  __ mov(r4, Operand(18273));
  __ mul(r3, r3, r4);
  __ add(r1, r3, Operand(r1, LSR, 16));
  // Save state[0].
  __ str(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));

  // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
  __ and_(r3, r0, Operand(0xFFFF));
  __ mov(r4, Operand(36969));
  __ mul(r3, r3, r4);
  __ add(r0, r3, Operand(r0, LSR, 16));
  // Save state[1].
  __ str(r0, FieldMemOperand(r2, ByteArray::kHeaderSize + kSeedSize));

  // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
  __ and_(r0, r0, Operand(0x3FFFF));
  __ add(r0, r0, Operand(r1, LSL, 14));

  __ bind(deferred->exit());
  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
  // Create this constant using mov/orr to avoid PC relative load.
  __ mov(r1, Operand(0x41000000));
  __ orr(r1, r1, Operand(0x300000));
  // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
  __ vmov(d7, r0, r1);
  // Move 0x4130000000000000 to VFP.
  __ mov(r0, Operand(0, RelocInfo::NONE));
  __ vmov(d8, r0, r1);
  // Subtract and store the result in the heap number.
  // d7 - d8 yields a double in [0, 1) built from the random mantissa bits.
  __ vsub(d7, d7, d8);
}
3347
3348
// Deferred slow path for DoRandom: calls the C random_uint32 function to
// obtain 32 random bits (and initialize the seed).
void LCodeGen::DoDeferredRandom(LRandom* instr) {
  __ PrepareCallCFunction(1, scratch0());
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
  // Return value is in r0.
}
3354
3355
// Emits Math.log via the transcendental cache stub (untagged, result in d2).
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3362
3363
// Emits Math.tan via the transcendental cache stub (untagged, result in d2).
void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3370
3371
// Emits Math.cos via the transcendental cache stub (untagged, result in d2).
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3378
3379
// Emits Math.sin via the transcendental cache stub (untagged, result in d2).
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3386
3387
// Dispatches a unary math LIR instruction to the matching emitter based on
// its opcode; unknown opcodes abort code generation.
void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathTan:
      DoMathTan(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;
    default:
      Abort("Unimplemented type of LUnaryMathOperation.");
      UNREACHABLE();
  }
}
3422
3423
// Emits an invocation of the function held in r1 (not known at compile
// time) as a method, with a lazy-deopt safepoint, then restores cp.
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(r1));
  ASSERT(instr->HasPointerMap());
  ASSERT(instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
  ParameterCount count(instr->arity());
  __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3435
3436
// Emits a keyed call (e.g. obj[expr]()) through the keyed-call initialize IC.
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3446
3447
3448void LCodeGen::DoCallNamed(LCallNamed* instr) {
3449 ASSERT(ToRegister(instr->result()).is(r0));
3450
3451 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003452 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3453 Handle<Code> ic =
Ben Murdoch589d6972011-11-30 16:04:58 +00003454 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003455 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003456 CallCode(ic, mode, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003457 // Restore context register.
3458 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3459}
3460
3461
// Emits a call to the function value in r1 via CallFunctionStub.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(r1));
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3471
3472
3473void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003474 ASSERT(ToRegister(instr->result()).is(r0));
3475
3476 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00003477 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
Steve Block44f0eee2011-05-26 01:26:41 +01003478 Handle<Code> ic =
Ben Murdoch589d6972011-11-30 16:04:58 +00003479 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003480 __ mov(r2, Operand(instr->name()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003481 CallCode(ic, mode, instr);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003482 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003483}
3484
3485
// Calls a statically-known global function as a function (not a method).
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
3490
3491
3492void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003493 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003494 ASSERT(ToRegister(instr->result()).is(r0));
3495
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003496 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003497 __ mov(r0, Operand(instr->arity()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003498 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003499}
3500
3501
// Emits a call into the V8 runtime for the instruction's function and arity.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3505
3506
// Emits a monomorphic named-property store: optionally writes a new map
// (transition), then stores the value either in-object or into the
// properties backing array, emitting a write barrier when required. The smi
// check in the barrier is skipped when the value is statically known to be
// a heap object.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  if (!instr->transition().is_null()) {
    // Install the transition map before the store.
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  HType type = instr->hydrogen()->value()->type();
  SmiCheck check_needed =
      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWriteField(object,
                          offset,
                          value,
                          scratch,
                          kLRHasBeenSaved,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  } else {
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWriteField(scratch,
                          offset,
                          value,
                          object,
                          kLRHasBeenSaved,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  }
}
3554
3555
3556void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3557 ASSERT(ToRegister(instr->object()).is(r1));
3558 ASSERT(ToRegister(instr->value()).is(r0));
3559
3560 // Name is always in r2.
3561 __ mov(r2, Operand(instr->name()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003562 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
Steve Block44f0eee2011-05-26 01:26:41 +01003563 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3564 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003565 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3566}
3567
3568
3569void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003570 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
Steve Block9fac8402011-05-12 15:51:54 +01003571 DeoptimizeIf(hs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003572}
3573
3574
// Stores a value into a fast-elements (FixedArray) backing store, with an
// optional write barrier for the stored element.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // The write-barrier path below relies on key/scratch being set up by
    // the register-key case, so a constant key requires no barrier.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // The smi check in the barrier can be omitted when the value is known
    // to be a heap object.
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ RecordWrite(elements,
                   key,
                   value,
                   kLRHasBeenSaved,
                   kSaveFPRegs,
                   EMIT_REMEMBERED_SET,
                   check_needed);
  }
}
3608
3609
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003610void LCodeGen::DoStoreKeyedFastDoubleElement(
3611 LStoreKeyedFastDoubleElement* instr) {
3612 DwVfpRegister value = ToDoubleRegister(instr->value());
3613 Register elements = ToRegister(instr->elements());
3614 Register key = no_reg;
3615 Register scratch = scratch0();
3616 bool key_is_constant = instr->key()->IsConstantOperand();
3617 int constant_key = 0;
3618 Label not_nan;
3619
3620 // Calculate the effective address of the slot in the array to store the
3621 // double value.
3622 if (key_is_constant) {
3623 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3624 if (constant_key & 0xF0000000) {
3625 Abort("array index constant value too big.");
3626 }
3627 } else {
3628 key = ToRegister(instr->key());
3629 }
Ben Murdoch589d6972011-11-30 16:04:58 +00003630 int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003631 Operand operand = key_is_constant
3632 ? Operand(constant_key * (1 << shift_size) +
3633 FixedDoubleArray::kHeaderSize - kHeapObjectTag)
3634 : Operand(key, LSL, shift_size);
3635 __ add(scratch, elements, operand);
3636 if (!key_is_constant) {
3637 __ add(scratch, scratch,
3638 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
3639 }
3640
3641 // Check for NaN. All NaNs must be canonicalized.
3642 __ VFPCompareAndSetFlags(value, value);
3643
3644 // Only load canonical NaN if the comparison above set the overflow.
3645 __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);
3646
3647 __ bind(&not_nan);
3648 __ vstr(value, scratch, 0);
3649}
3650
3651
Steve Block44f0eee2011-05-26 01:26:41 +01003652void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3653 LStoreKeyedSpecializedArrayElement* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003654
3655 Register external_pointer = ToRegister(instr->external_pointer());
Ben Murdoch257744e2011-11-30 15:57:28 +00003656 Register key = no_reg;
Ben Murdoch589d6972011-11-30 16:04:58 +00003657 ElementsKind elements_kind = instr->elements_kind();
Ben Murdoch257744e2011-11-30 15:57:28 +00003658 bool key_is_constant = instr->key()->IsConstantOperand();
3659 int constant_key = 0;
3660 if (key_is_constant) {
3661 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3662 if (constant_key & 0xF0000000) {
3663 Abort("array index constant value too big.");
3664 }
3665 } else {
3666 key = ToRegister(instr->key());
3667 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003668 int shift_size = ElementsKindToShiftSize(elements_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00003669
Ben Murdoch589d6972011-11-30 16:04:58 +00003670 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
3671 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01003672 CpuFeatures::Scope scope(VFP3);
3673 DwVfpRegister value(ToDoubleRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003674 Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
3675 : Operand(key, LSL, shift_size));
3676 __ add(scratch0(), external_pointer, operand);
Ben Murdoch589d6972011-11-30 16:04:58 +00003677 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003678 __ vcvt_f32_f64(double_scratch0().low(), value);
3679 __ vstr(double_scratch0().low(), scratch0(), 0);
Ben Murdoch589d6972011-11-30 16:04:58 +00003680 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
Ben Murdoch257744e2011-11-30 15:57:28 +00003681 __ vstr(value, scratch0(), 0);
3682 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003683 } else {
3684 Register value(ToRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003685 MemOperand mem_operand(key_is_constant
3686 ? MemOperand(external_pointer, constant_key * (1 << shift_size))
3687 : MemOperand(external_pointer, key, LSL, shift_size));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003688 switch (elements_kind) {
Ben Murdoch589d6972011-11-30 16:04:58 +00003689 case EXTERNAL_PIXEL_ELEMENTS:
3690 case EXTERNAL_BYTE_ELEMENTS:
3691 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003692 __ strb(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003693 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003694 case EXTERNAL_SHORT_ELEMENTS:
3695 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003696 __ strh(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003697 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003698 case EXTERNAL_INT_ELEMENTS:
3699 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003700 __ str(value, mem_operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003701 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003702 case EXTERNAL_FLOAT_ELEMENTS:
3703 case EXTERNAL_DOUBLE_ELEMENTS:
3704 case FAST_DOUBLE_ELEMENTS:
3705 case FAST_ELEMENTS:
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003706 case FAST_SMI_ONLY_ELEMENTS:
Ben Murdoch589d6972011-11-30 16:04:58 +00003707 case DICTIONARY_ELEMENTS:
3708 case NON_STRICT_ARGUMENTS_ELEMENTS:
Ben Murdoch8b112d22011-06-08 16:22:53 +01003709 UNREACHABLE();
3710 break;
3711 }
3712 }
Steve Block44f0eee2011-05-26 01:26:41 +01003713}
3714
3715
Ben Murdochb0fe1622011-05-05 13:52:32 +01003716void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3717 ASSERT(ToRegister(instr->object()).is(r2));
3718 ASSERT(ToRegister(instr->key()).is(r1));
3719 ASSERT(ToRegister(instr->value()).is(r0));
3720
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003721 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
Steve Block44f0eee2011-05-26 01:26:41 +01003722 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3723 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003724 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3725}
3726
3727
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003728void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3729 Register object_reg = ToRegister(instr->object());
3730 Register new_map_reg = ToRegister(instr->new_map_reg());
3731 Register scratch = scratch0();
3732
3733 Handle<Map> from_map = instr->original_map();
3734 Handle<Map> to_map = instr->transitioned_map();
3735 ElementsKind from_kind = from_map->elements_kind();
3736 ElementsKind to_kind = to_map->elements_kind();
3737
3738 Label not_applicable;
3739 __ ldr(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset));
3740 __ cmp(scratch, Operand(from_map));
3741 __ b(ne, &not_applicable);
3742 __ mov(new_map_reg, Operand(to_map));
3743 if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
3744 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
3745 // Write barrier.
3746 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
3747 scratch, kLRHasBeenSaved, kDontSaveFPRegs);
3748 } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
3749 to_kind == FAST_DOUBLE_ELEMENTS) {
3750 Register fixed_object_reg = ToRegister(instr->temp_reg());
3751 ASSERT(fixed_object_reg.is(r2));
3752 ASSERT(new_map_reg.is(r3));
3753 __ mov(fixed_object_reg, object_reg);
3754 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3755 RelocInfo::CODE_TARGET, instr);
3756 } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
3757 Register fixed_object_reg = ToRegister(instr->temp_reg());
3758 ASSERT(fixed_object_reg.is(r2));
3759 ASSERT(new_map_reg.is(r3));
3760 __ mov(fixed_object_reg, object_reg);
3761 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3762 RelocInfo::CODE_TARGET, instr);
3763 } else {
3764 UNREACHABLE();
3765 }
3766 __ bind(&not_applicable);
3767}
3768
3769
Ben Murdoch257744e2011-11-30 15:57:28 +00003770void LCodeGen::DoStringAdd(LStringAdd* instr) {
3771 __ push(ToRegister(instr->left()));
3772 __ push(ToRegister(instr->right()));
3773 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
3774 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3775}
3776
3777
Steve Block1e0659c2011-05-24 12:43:12 +01003778void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3779 class DeferredStringCharCodeAt: public LDeferredCode {
3780 public:
3781 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3782 : LDeferredCode(codegen), instr_(instr) { }
3783 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003784 virtual LInstruction* instr() { return instr_; }
Steve Block1e0659c2011-05-24 12:43:12 +01003785 private:
3786 LStringCharCodeAt* instr_;
3787 };
3788
Steve Block1e0659c2011-05-24 12:43:12 +01003789 DeferredStringCharCodeAt* deferred =
3790 new DeferredStringCharCodeAt(this, instr);
3791
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003792 StringCharLoadGenerator::Generate(masm(),
3793 ToRegister(instr->string()),
3794 ToRegister(instr->index()),
3795 ToRegister(instr->result()),
3796 deferred->entry());
Steve Block1e0659c2011-05-24 12:43:12 +01003797 __ bind(deferred->exit());
3798}
3799
3800
// Slow path for DoStringCharCodeAt: calls the StringCharCodeAt runtime
// function with the string and the smi-tagged index, then untags the
// resulting char code into the instruction's result register.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  // The runtime result in r0 is a smi char code; untag it and write it to
  // the result register's safepoint slot.
  __ SmiUntag(r0);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3831
3832
Steve Block44f0eee2011-05-26 01:26:41 +01003833void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3834 class DeferredStringCharFromCode: public LDeferredCode {
3835 public:
3836 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3837 : LDeferredCode(codegen), instr_(instr) { }
3838 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003839 virtual LInstruction* instr() { return instr_; }
Steve Block44f0eee2011-05-26 01:26:41 +01003840 private:
3841 LStringCharFromCode* instr_;
3842 };
3843
3844 DeferredStringCharFromCode* deferred =
3845 new DeferredStringCharFromCode(this, instr);
3846
3847 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3848 Register char_code = ToRegister(instr->char_code());
3849 Register result = ToRegister(instr->result());
3850 ASSERT(!char_code.is(result));
3851
3852 __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
3853 __ b(hi, deferred->entry());
3854 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3855 __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
3856 __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
3857 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3858 __ cmp(result, ip);
3859 __ b(eq, deferred->entry());
3860 __ bind(deferred->exit());
3861}
3862
3863
// Slow path for DoStringCharFromCode: calls the CharFromCode runtime
// function with the smi-tagged character code.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  // Write the runtime result (in r0) to the result register's safepoint
  // slot.
  __ StoreToSafepointRegisterSlot(r0, result);
}
3879
3880
Steve Block1e0659c2011-05-24 12:43:12 +01003881void LCodeGen::DoStringLength(LStringLength* instr) {
3882 Register string = ToRegister(instr->InputAt(0));
3883 Register result = ToRegister(instr->result());
3884 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3885}
3886
3887
Ben Murdochb0fe1622011-05-05 13:52:32 +01003888void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003889 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003890 ASSERT(input->IsRegister() || input->IsStackSlot());
3891 LOperand* output = instr->result();
3892 ASSERT(output->IsDoubleRegister());
3893 SwVfpRegister single_scratch = double_scratch0().low();
3894 if (input->IsStackSlot()) {
3895 Register scratch = scratch0();
3896 __ ldr(scratch, ToMemOperand(input));
3897 __ vmov(single_scratch, scratch);
3898 } else {
3899 __ vmov(single_scratch, ToRegister(input));
3900 }
3901 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003902}
3903
3904
// Tags an int32 as a smi.  If the tagging shift overflows (value does not
// fit in 31 bits), the deferred path boxes the value in a heap number.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  // Deferred slow path; delegates to DoDeferredNumberTagI.
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagI* instr_;
  };

  Register src = ToRegister(instr->InputAt(0));
  Register dst = ToRegister(instr->result());

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  // SetCC makes the tagging shift set the V flag on signed overflow.
  __ SmiTag(dst, src, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}
3924
3925
// Slow path for DoNumberTagI: the int32 did not fit in a smi, so box it
// in a newly allocated heap number, calling the runtime if inline
// allocation fails or is disabled.
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register src = ToRegister(instr->InputAt(0));
  Register dst = ToRegister(instr->result());
  DoubleRegister dbl_scratch = double_scratch0();
  SwVfpRegister flt_scratch = dbl_scratch.low();

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  if (dst.is(src)) {
    // src was clobbered by the overflowed tag; recover the original value
    // by undoing the shift and flipping the (lost) sign bit.
    __ SmiUntag(src, dst);
    __ eor(src, src, Operand(0x80000000));
  }
  // Convert the integer to a double held in dbl_scratch.
  __ vmov(flt_scratch, src);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    __ Move(dst, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(ip, Operand(0));
  __ StoreToSafepointRegisterSlot(ip, dst);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ Move(dst, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, dst, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  __ StoreToSafepointRegisterSlot(dst, dst);
}
3971
3972
// Boxes a double value in a newly allocated heap number.  Allocation
// failure (or disabled inline allocation) goes through a deferred runtime
// call; the double payload is written once the box exists.
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  // Deferred slow path; delegates to DoDeferredNumberTagD.
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double payload into the (inline or runtime) allocated box.
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
}
4001
4002
// Slow path for DoNumberTagD: allocates the heap number via the runtime.
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  // Store the allocated heap number (in r0) into the result register's
  // safepoint slot.
  __ StoreToSafepointRegisterSlot(r0, reg);
}
4014
4015
4016void LCodeGen::DoSmiTag(LSmiTag* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004017 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004018 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->InputAt(0)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004019}
4020
4021
// Removes the smi tag from a value.  When a check is needed, the carry
// flag set by SmiUntag (input was a heap object) triggers a deopt.
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->needs_check()) {
    STATIC_ASSERT(kHeapObjectTag == 1);
    // If the input is a HeapObject, SmiUntag will set the carry flag.
    __ SmiUntag(result, input, SetCC);
    DeoptimizeIf(cs, instr->environment());
  } else {
    __ SmiUntag(result, input);
  }
}
4034
4035
4036void LCodeGen::EmitNumberUntagD(Register input_reg,
4037 DoubleRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01004038 bool deoptimize_on_undefined,
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004039 bool deoptimize_on_minus_zero,
Ben Murdochb0fe1622011-05-05 13:52:32 +01004040 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01004041 Register scratch = scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004042 SwVfpRegister flt_scratch = double_scratch0().low();
4043 ASSERT(!result_reg.is(double_scratch0()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004044
4045 Label load_smi, heap_number, done;
4046
4047 // Smi check.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004048 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004049
4050 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01004051 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004052 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01004053 __ cmp(scratch, Operand(ip));
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01004054 if (deoptimize_on_undefined) {
4055 DeoptimizeIf(ne, env);
4056 } else {
4057 Label heap_number;
4058 __ b(eq, &heap_number);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004059
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01004060 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
4061 __ cmp(input_reg, Operand(ip));
4062 DeoptimizeIf(ne, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004063
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01004064 // Convert undefined to NaN.
4065 __ LoadRoot(ip, Heap::kNanValueRootIndex);
4066 __ sub(ip, ip, Operand(kHeapObjectTag));
4067 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
4068 __ jmp(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004069
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01004070 __ bind(&heap_number);
4071 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004072 // Heap number to double register conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01004073 __ sub(ip, input_reg, Operand(kHeapObjectTag));
4074 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004075 if (deoptimize_on_minus_zero) {
4076 __ vmov(ip, result_reg.low());
4077 __ cmp(ip, Operand(0));
4078 __ b(ne, &done);
4079 __ vmov(ip, result_reg.high());
4080 __ cmp(ip, Operand(HeapNumber::kSignMask));
4081 DeoptimizeIf(eq, env);
4082 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004083 __ jmp(&done);
4084
4085 // Smi to double register conversion
4086 __ bind(&load_smi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004087 // scratch: untagged value of input_reg
4088 __ vmov(flt_scratch, scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004089 __ vcvt_f64_s32(result_reg, flt_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004090 __ bind(&done);
4091}
4092
4093
Ben Murdochb0fe1622011-05-05 13:52:32 +01004094void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004095 Register input_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01004096 Register scratch1 = scratch0();
4097 Register scratch2 = ToRegister(instr->TempAt(0));
4098 DwVfpRegister double_scratch = double_scratch0();
4099 SwVfpRegister single_scratch = double_scratch.low();
4100
4101 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
4102 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
4103
4104 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01004105
Ben Murdoch257744e2011-11-30 15:57:28 +00004106 // The input was optimistically untagged; revert it.
4107 // The carry flag is set when we reach this deferred code as we just executed
4108 // SmiUntag(heap_object, SetCC)
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004109 STATIC_ASSERT(kHeapObjectTag == 1);
Ben Murdoch257744e2011-11-30 15:57:28 +00004110 __ adc(input_reg, input_reg, Operand(input_reg));
4111
Ben Murdochb0fe1622011-05-05 13:52:32 +01004112 // Heap number map check.
Steve Block44f0eee2011-05-26 01:26:41 +01004113 __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004114 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block44f0eee2011-05-26 01:26:41 +01004115 __ cmp(scratch1, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004116
4117 if (instr->truncating()) {
Steve Block44f0eee2011-05-26 01:26:41 +01004118 Register scratch3 = ToRegister(instr->TempAt(1));
4119 DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
4120 ASSERT(!scratch3.is(input_reg) &&
4121 !scratch3.is(scratch1) &&
4122 !scratch3.is(scratch2));
4123 // Performs a truncating conversion of a floating point number as used by
4124 // the JS bitwise operations.
Ben Murdochb0fe1622011-05-05 13:52:32 +01004125 Label heap_number;
4126 __ b(eq, &heap_number);
4127 // Check for undefined. Undefined is converted to zero for truncating
4128 // conversions.
4129 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
4130 __ cmp(input_reg, Operand(ip));
4131 DeoptimizeIf(ne, instr->environment());
4132 __ mov(input_reg, Operand(0));
4133 __ b(&done);
4134
4135 __ bind(&heap_number);
Steve Block44f0eee2011-05-26 01:26:41 +01004136 __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
4137 __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);
4138
4139 __ EmitECMATruncate(input_reg,
4140 double_scratch2,
4141 single_scratch,
4142 scratch1,
4143 scratch2,
4144 scratch3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004145
4146 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01004147 CpuFeatures::Scope scope(VFP3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004148 // Deoptimize if we don't have a heap number.
4149 DeoptimizeIf(ne, instr->environment());
4150
4151 __ sub(ip, input_reg, Operand(kHeapObjectTag));
Steve Block44f0eee2011-05-26 01:26:41 +01004152 __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
4153 __ EmitVFPTruncate(kRoundToZero,
4154 single_scratch,
4155 double_scratch,
4156 scratch1,
4157 scratch2,
4158 kCheckForInexactConversion);
4159 DeoptimizeIf(ne, instr->environment());
4160 // Load the result.
4161 __ vmov(input_reg, single_scratch);
4162
Ben Murdochb0fe1622011-05-05 13:52:32 +01004163 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +01004164 __ cmp(input_reg, Operand(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004165 __ b(ne, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01004166 __ vmov(scratch1, double_scratch.high());
4167 __ tst(scratch1, Operand(HeapNumber::kSignMask));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004168 DeoptimizeIf(ne, instr->environment());
4169 }
4170 }
4171 __ bind(&done);
4172}
4173
4174
// Converts a tagged value to int32.  The input is optimistically untagged
// as a smi; heap objects (signalled by the carry flag) are handled by the
// deferred path, which restores the tag and converts the heap number.
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  // Deferred slow path; delegates to DoDeferredTaggedToI.
  class DeferredTaggedToI: public LDeferredCode {
   public:
    DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LTaggedToI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Optimistically untag the input.
  // If the input is a HeapObject, SmiUntag will set the carry flag.
  __ SmiUntag(input_reg, SetCC);
  // Branch to deferred code if the input was tagged.
  // The deferred code will take care of restoring the tag.
  __ b(cs, deferred->entry());
  __ bind(deferred->exit());
}
4202
4203
// Converts a tagged number (smi or heap number) to a double register; the
// heavy lifting is done by EmitNumberUntagD.
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  DoubleRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   instr->hydrogen()->deoptimize_on_minus_zero(),
                   instr->environment());
}
4218
4219
4220void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01004221 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01004222 Register scratch1 = scratch0();
4223 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01004224 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01004225 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01004226
Steve Block44f0eee2011-05-26 01:26:41 +01004227 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01004228
Steve Block44f0eee2011-05-26 01:26:41 +01004229 if (instr->truncating()) {
4230 Register scratch3 = ToRegister(instr->TempAt(1));
4231 __ EmitECMATruncate(result_reg,
4232 double_input,
4233 single_scratch,
4234 scratch1,
4235 scratch2,
4236 scratch3);
4237 } else {
4238 VFPRoundingMode rounding_mode = kRoundToMinusInf;
4239 __ EmitVFPTruncate(rounding_mode,
4240 single_scratch,
4241 double_input,
4242 scratch1,
4243 scratch2,
4244 kCheckForInexactConversion);
4245 // Deoptimize if we had a vfp invalid exception,
4246 // including inexact operation.
Steve Block1e0659c2011-05-24 12:43:12 +01004247 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01004248 // Retrieve the result.
4249 __ vmov(result_reg, single_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01004250 }
Steve Block44f0eee2011-05-26 01:26:41 +01004251 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004252}
4253
4254
// Deoptimizes unless the input value is a smi (low tag bit clear).
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(ne, instr->environment());
}
4260
4261
// Deoptimizes if the input value is a smi (low tag bit clear).
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
}
4267
4268
// Deoptimizes unless the input heap object's instance type satisfies the
// hydrogen check: either membership in an [first, last] interval, or a
// mask-and-tag test on the instance type byte.
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the instance type byte from the object's map.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmp(scratch, Operand(first));

    // If there is only one type in the interval check for equality.
    if (first == last) {
      DeoptimizeIf(ne, instr->environment());
    } else {
      // Unsigned lower-bound check: deopt if type < first.
      DeoptimizeIf(lo, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmp(scratch, Operand(last));
        DeoptimizeIf(hi, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      // Single-bit mask: a tst is enough; the expected tag is either 0 or
      // the mask bit itself.
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ tst(scratch, Operand(mask));
      DeoptimizeIf(tag == 0 ? ne : eq, instr->environment());
    } else {
      // General case: isolate the masked bits and compare with the tag.
      __ and_(scratch, scratch, Operand(mask));
      __ cmp(scratch, Operand(tag));
      DeoptimizeIf(ne, instr->environment());
    }
  }
}
4310
4311
4312void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004313 Register reg = ToRegister(instr->value());
4314 Handle<JSFunction> target = instr->hydrogen()->target();
4315 if (isolate()->heap()->InNewSpace(*target)) {
4316 Register reg = ToRegister(instr->value());
4317 Handle<JSGlobalPropertyCell> cell =
4318 isolate()->factory()->NewJSGlobalPropertyCell(target);
4319 __ mov(ip, Operand(Handle<Object>(cell)));
4320 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
4321 __ cmp(reg, ip);
4322 } else {
4323 __ cmp(reg, Operand(target));
4324 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004325 DeoptimizeIf(ne, instr->environment());
4326}
4327
4328
// Shared map-check helper: compares the object's map in 'reg' against
// 'map' (CompareMap may also accept element-transitioned maps depending on
// 'mode') and deoptimizes into 'env' on mismatch. On the fall-through path
// the flags are 'eq'.
void LCodeGen::DoCheckMapCommon(Register reg,
                                Register scratch,
                                Handle<Map> map,
                                CompareMapMode mode,
                                LEnvironment* env) {
  Label success;
  __ CompareMap(reg, scratch, map, &success, mode);
  DeoptimizeIf(ne, env);
  __ bind(&success);
}
4339
4340
// Deoptimizes unless the input object's map matches the hydrogen
// instruction's expected map (subject to the instruction's compare mode).
void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  Handle<Map> map = instr->hydrogen()->map();
  DoCheckMapCommon(reg, scratch, map, instr->hydrogen()->mode(),
                   instr->environment());
}
4350
4351
// Clamps a double value to the uint8 range [0, 255] via the
// macro-assembler helper.
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}
4358
4359
// Clamps an untagged integer value to the uint8 range [0, 255].
void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  Register unclamped_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampUint8(result_reg, unclamped_reg);
}
4365
4366
// Clamps a tagged value to the uint8 range. Smis are untagged and clamped
// directly, heap numbers are clamped as doubles, undefined becomes 0, and
// any other value deoptimizes.
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  Register scratch = scratch0();
  Register input_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  Label is_smi, done, heap_number;

  // Both smi and heap number cases are handled.
  // Untags into result_reg and jumps if the input was a smi.
  __ UntagAndJumpIfSmi(result_reg, input_reg, &is_smi);

  // Check for heap number
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(factory()->heap_number_map()));
  __ b(eq, &heap_number);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, Operand(factory()->undefined_value()));
  DeoptimizeIf(ne, instr->environment());
  __ mov(result_reg, Operand(0));
  __ jmp(&done);

  // Heap number
  __ bind(&heap_number);
  __ vldr(double_scratch0(), FieldMemOperand(input_reg,
                                             HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
  __ jmp(&done);

  // smi
  __ bind(&is_smi);
  __ ClampUint8(result_reg, result_reg);

  __ bind(&done);
}
4402
4403
Ben Murdochb0fe1622011-05-05 13:52:32 +01004404void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004405 Register temp1 = ToRegister(instr->TempAt(0));
4406 Register temp2 = ToRegister(instr->TempAt(1));
Steve Block9fac8402011-05-12 15:51:54 +01004407
4408 Handle<JSObject> holder = instr->holder();
Ben Murdochb8e0da22011-05-16 14:20:40 +01004409 Handle<JSObject> current_prototype = instr->prototype();
Steve Block9fac8402011-05-12 15:51:54 +01004410
4411 // Load prototype object.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004412 __ LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01004413
4414 // Check prototype maps up to the holder.
4415 while (!current_prototype.is_identical_to(holder)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004416 DoCheckMapCommon(temp1, temp2,
4417 Handle<Map>(current_prototype->map()),
4418 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
Steve Block9fac8402011-05-12 15:51:54 +01004419 current_prototype =
4420 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
4421 // Load next prototype object.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004422 __ LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01004423 }
4424
4425 // Check the holder map.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004426 DoCheckMapCommon(temp1, temp2,
4427 Handle<Map>(current_prototype->map()),
4428 ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
Steve Block9fac8402011-05-12 15:51:54 +01004429 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004430}
4431
4432
// Inline-allocates a JSObject for the given constructor in new space and
// initializes its map, elements, properties, and in-object fields. Falls
// back to the deferred (runtime) path if allocation fails.
void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
  class DeferredAllocateObject: public LDeferredCode {
   public:
    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LAllocateObject* instr_;
  };

  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);

  Register result = ToRegister(instr->result());
  Register scratch = ToRegister(instr->TempAt(0));
  Register scratch2 = ToRegister(instr->TempAt(1));
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
  Handle<Map> initial_map(constructor->initial_map());
  int instance_size = initial_map->instance_size();
  // All properties must be in-object (no out-of-line property backing
  // store) for this fast path.
  ASSERT(initial_map->pre_allocated_property_fields() +
         initial_map->unused_property_fields() -
         initial_map->inobject_properties() == 0);

  // Allocate memory for the object. The initial map might change when
  // the constructor's prototype changes, but instance size and property
  // counts remain unchanged (if slack tracking finished).
  ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
  __ AllocateInNewSpace(instance_size,
                        result,
                        scratch,
                        scratch2,
                        deferred->entry(),
                        TAG_OBJECT);

  // Load the initial map.
  Register map = scratch;
  __ LoadHeapObject(map, constructor);
  __ ldr(map, FieldMemOperand(map, JSFunction::kPrototypeOrInitialMapOffset));

  // Initialize map and fields of the newly allocated object.
  ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
  __ str(map, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch, FieldMemOperand(result, JSObject::kElementsOffset));
  __ str(scratch, FieldMemOperand(result, JSObject::kPropertiesOffset));
  if (initial_map->inobject_properties() != 0) {
    // Pre-fill all in-object properties with undefined.
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    for (int i = 0; i < initial_map->inobject_properties(); i++) {
      int property_offset = JSObject::kHeaderSize + i * kPointerSize;
      __ str(scratch, FieldMemOperand(result, property_offset));
    }
  }

  __ bind(deferred->exit());
}
4488
4489
// Slow path for DoAllocateObject: allocates the object by calling
// Runtime::kNewObject with the constructor, then stores the result back
// into the safepoint register slot.
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
  Register result = ToRegister(instr->result());
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ LoadHeapObject(r0, constructor);
  __ push(r0);
  CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
  __ StoreToSafepointRegisterSlot(r0, result);
}
4505
4506
// Creates an array literal, choosing between the shallow-clone stub and
// the runtime depending on elements kind, literal depth, and length.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  Heap* heap = isolate()->heap();
  ElementsKind boilerplate_elements_kind =
      instr->hydrogen()->boilerplate_elements_kind();

  // Deopt if the array literal boilerplate ElementsKind is of a type different
  // than the expected one. The check isn't necessary if the boilerplate has
  // already been converted to FAST_ELEMENTS.
  if (boilerplate_elements_kind != FAST_ELEMENTS) {
    __ LoadHeapObject(r1, instr->hydrogen()->boilerplate_object());
    // Load map into r2.
    __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
    // Load the map's "bit field 2".
    __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
    // Retrieve elements_kind from bit field 2.
    __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount);
    __ cmp(r2, Operand(boilerplate_elements_kind));
    DeoptimizeIf(ne, instr->environment());
  }

  // Set up arguments: literals array, literal index, and constant elements.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  // Boilerplate already exists, constant elements are never accessed.
  // Pass an empty fixed array.
  __ mov(r1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals must go through the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4556
4557
// Emits code that deep-copies 'object' (and, recursively, any JSObject
// values it holds) from 'source' into pre-allocated memory at
// 'result' + *offset. '*offset' is advanced past the object and its
// elements backing store so nested objects are laid out contiguously.
// r2 is used as the copy scratch register throughout.
void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                            Register result,
                            Register source,
                            int* offset) {
  ASSERT(!source.is(r2));
  ASSERT(!result.is(r2));

  // Only elements backing stores for non-COW arrays need to be copied.
  Handle<FixedArrayBase> elements(object->elements());
  bool has_elements = elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map();

  // Increase the offset so that subsequent objects end up right after
  // this object and its backing store.
  int object_offset = *offset;
  int object_size = object->map()->instance_size();
  int elements_offset = *offset + object_size;
  int elements_size = has_elements ? elements->Size() : 0;
  *offset += object_size + elements_size;

  // Copy object header.
  ASSERT(object->properties()->length() == 0);
  int inobject_properties = object->map()->inobject_properties();
  int header_size = object_size - inobject_properties * kPointerSize;
  for (int i = 0; i < header_size; i += kPointerSize) {
    if (has_elements && i == JSObject::kElementsOffset) {
      // Point the elements field at the copied backing store instead of
      // the boilerplate's.
      __ add(r2, result, Operand(elements_offset));
    } else {
      __ ldr(r2, FieldMemOperand(source, i));
    }
    __ str(r2, FieldMemOperand(result, object_offset + i));
  }

  // Copy in-object properties.
  for (int i = 0; i < inobject_properties; i++) {
    int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
    Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
    if (value->IsJSObject()) {
      // Nested object: store a pointer to where its copy will live, then
      // recurse to emit the copy itself.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      __ add(r2, result, Operand(*offset));
      __ str(r2, FieldMemOperand(result, total_offset));
      __ LoadHeapObject(source, value_object);
      EmitDeepCopy(value_object, result, source, offset);
    } else if (value->IsHeapObject()) {
      __ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
      __ str(r2, FieldMemOperand(result, total_offset));
    } else {
      // Smi or other immediate value.
      __ mov(r2, Operand(value));
      __ str(r2, FieldMemOperand(result, total_offset));
    }
  }

  if (has_elements) {
    // Copy elements backing store header.
    __ LoadHeapObject(source, elements);
    for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
      __ ldr(r2, FieldMemOperand(source, i));
      __ str(r2, FieldMemOperand(result, elements_offset + i));
    }

    // Copy elements backing store content.
    int elements_length = has_elements ? elements->length() : 0;
    if (elements->IsFixedDoubleArray()) {
      Handle<FixedDoubleArray> double_array =
          Handle<FixedDoubleArray>::cast(elements);
      for (int i = 0; i < elements_length; i++) {
        int64_t value = double_array->get_representation(i);
        // We only support little endian mode...
        int32_t value_low = value & 0xFFFFFFFF;
        int32_t value_high = value >> 32;
        int total_offset =
            elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
        __ mov(r2, Operand(value_low));
        __ str(r2, FieldMemOperand(result, total_offset));
        __ mov(r2, Operand(value_high));
        __ str(r2, FieldMemOperand(result, total_offset + 4));
      }
    } else if (elements->IsFixedArray()) {
      for (int i = 0; i < elements_length; i++) {
        int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
        Handle<Object> value = JSObject::GetElement(object, i);
        if (value->IsJSObject()) {
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          __ add(r2, result, Operand(*offset));
          __ str(r2, FieldMemOperand(result, total_offset));
          __ LoadHeapObject(source, value_object);
          EmitDeepCopy(value_object, result, source, offset);
        } else if (value->IsHeapObject()) {
          __ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
          __ str(r2, FieldMemOperand(result, total_offset));
        } else {
          __ mov(r2, Operand(value));
          __ str(r2, FieldMemOperand(result, total_offset));
        }
      }
    } else {
      UNREACHABLE();
    }
  }
}
4658
4659
// Materializes a "fast" literal: allocates space for the whole nested
// literal (boilerplate plus all sub-objects) in one allocation, then deep
// copies the boilerplate into it with EmitDeepCopy.
void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
  int size = instr->hydrogen()->total_size();

  // Allocate all objects that are part of the literal in one big
  // allocation. This avoids multiple limit checks.
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Fallback: allocate via the runtime if new space is exhausted.
  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ push(r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);

  __ bind(&allocated);
  int offset = 0;
  __ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
  EmitDeepCopy(instr->hydrogen()->boilerplate(), r0, r1, &offset);
  // The deep copy must consume exactly the allocated size.
  ASSERT_EQ(size, offset);
}
4680
4681
// Creates an object literal, using the shallow-clone stub for small,
// fast-elements literals and the runtime otherwise.
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  Handle<FixedArray> literals(instr->environment()->closure()->literals());
  Handle<FixedArray> constant_properties =
      instr->hydrogen()->constant_properties();

  // Set up the parameters to the stub/runtime call.
  __ LoadHeapObject(r4, literals);
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(constant_properties));
  int flags = instr->hydrogen()->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  __ mov(r1, Operand(Smi::FromInt(flags)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function or stub to call.
  // constant_properties stores key/value pairs, hence the division by 2.
  int properties_count = constant_properties->length() / 2;
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4709
4710
// Converts the object in r0 to fast-properties mode via the runtime.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
4716
4717
// Creates a regexp literal: materializes the boilerplate via the runtime
// if the literal slot is still undefined, then makes a shallow copy of it
// into freshly allocated memory.
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Fallback allocation through the runtime; r1 (the boilerplate) is
  // saved across the call.
  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  if ((size % (2 * kPointerSize)) != 0) {
    // Copy the odd trailing word when size is not a multiple of two words.
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
4771
4772
// Creates a closure for a function literal, using the fast stub for
// non-pretenured functions without literals and the runtime otherwise.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(shared_info->language_mode());
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Runtime path: pass context, shared info, and the pretenure flag.
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
4792
4793
// Computes typeof of the input by calling the runtime.
void LCodeGen::DoTypeof(LTypeof* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}
4799
4800
// Branches on whether typeof(input) equals the instruction's type literal.
// EmitTypeofIs emits the comparison and returns the condition to branch
// on; kNoCondition means the comparison already branched unconditionally
// (unknown type string), so no EmitBranch is needed.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());
  if (final_branch_condition != kNoCondition) {
    EmitBranch(true_block, false_block, final_branch_condition);
  }
}
4816
4817
// Emits the comparison for typeof(input) == type_name. May branch directly
// to true_label/false_label for cases it can decide early; otherwise
// returns the condition on which the caller should branch to true_label.
// Returns kNoCondition when the type name is unknown (always false).
// Note: 'input' is clobbered in several branches (its map is loaded into
// it).
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis and heap numbers are "number".
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    // Strings that are not undetectable are "string".
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    // typeof null == "null" only under the harmony semantics flag.
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // Both JS_FUNCTION_TYPE and JS_FUNCTION_PROXY_TYPE are "function".
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, scratch, input, JS_FUNCTION_TYPE);
    __ b(eq, true_label);
    __ cmp(input, Operand(JS_FUNCTION_PROXY_TYPE));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
      // Classic semantics: typeof null == "object".
      __ CompareRoot(input, Heap::kNullValueRootIndex);
      __ b(eq, true_label);
    }
    // Must be a non-callable spec object that is not undetectable.
    __ CompareObjectType(input, input, scratch,
                         FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, false_label);
    __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    // Unknown type string: always false.
    __ b(false_label);
  }

  return final_branch_condition;
}
4889
4890
// Branches on whether the current function invocation is a construct call
// (frame marker check is emitted by EmitIsConstructCall).
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp1, scratch0());
  EmitBranch(true_block, false_block, eq);
}
4899
4900
// Emits a comparison of the calling frame's marker with
// StackFrame::CONSTRUCT; leaves 'eq' set iff this is a construct call.
// Skips over an arguments-adaptor frame if one is present.
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4918
4919
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004920void LCodeGen::EnsureSpaceForLazyDeopt() {
4921 // Ensure that we have enough space after the previous lazy-bailout
4922 // instruction for patching the code here.
4923 int current_pc = masm()->pc_offset();
4924 int patch_size = Deoptimizer::patch_size();
4925 if (current_pc < last_lazy_deopt_pc_ + patch_size) {
4926 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
4927 ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
4928 while (padding_size > 0) {
4929 __ nop();
4930 padding_size -= Assembler::kInstrSize;
4931 }
4932 }
4933 last_lazy_deopt_pc_ = masm()->pc_offset();
4934}
4935
4936
Ben Murdochb0fe1622011-05-05 13:52:32 +01004937void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004938 EnsureSpaceForLazyDeopt();
4939 ASSERT(instr->HasEnvironment());
4940 LEnvironment* env = instr->environment();
4941 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
4942 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004943}
4944
4945
// Unconditional deoptimization: the 'al' (always) condition makes
// DeoptimizeIf emit an unconditional jump to the deopt entry.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}
4949
4950
// Implements 'delete object[key]' by invoking the DELETE builtin with the
// object, key, and the current strict-mode flag pushed on the stack.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  // The builtin call is a lazy-deopt safepoint.
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
4964
4965
// Implements the 'in' operator by invoking the IN builtin with the key and
// object pushed on the stack.
void LCodeGen::DoIn(LIn* instr) {
  Register obj = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(key, obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  // The builtin call is a lazy-deopt safepoint.
  SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
4976
4977
// Out-of-line (deferred) part of DoStackCheck: calls the StackGuard runtime
// function with all registers saved, recording a safepoint that supports
// lazy deoptimization at this point.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
4987
4988
// Emits a stack-overflow check, either at function entry (calls the
// StackCheckStub inline) or at a backwards branch (jumps to deferred code
// that calls the StackGuard runtime function).
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Deferred code object used for the backwards-branch case.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStackCheck* instr_;
  };

  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &done);  // sp >= limit: no overflow.
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    EnsureSpaceForLazyDeopt();
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(lo, deferred_stack_check->entry());  // sp < limit: go slow path.
    EnsureSpaceForLazyDeopt();
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting call and the safepoint in
    // the deferred code.
  }
}
5033
5034
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  // No lazy deopt is possible at an OSR entry, hence kNoLazyDeopt.
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  // Record the OSR entry point exactly once per compiled function.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
5050
5051
// Prepares the enumerable (expected in r0) for a for-in loop. Deoptimizes if
// it is undefined, null, a smi, or a proxy. Otherwise leaves either the
// object's map (fast case, valid enum cache) or the property name array
// (slow case, via the runtime) in r0.
void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
  // Deopt on undefined.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  DeoptimizeIf(eq, instr->environment());

  // Deopt on null.
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  DeoptimizeIf(eq, instr->environment());

  // Deopt on smis.
  __ tst(r0, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());

  // Deopt on JS proxies (they occupy the lowest spec-object type range).
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  DeoptimizeIf(le, instr->environment());

  Label use_cache, call_runtime;
  // Falls through to call_runtime when the enum cache is not usable.
  __ CheckEnumCache(null_value, &call_runtime);

  // Fast case: use the map itself; the enum cache hangs off of it.
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);
  CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

  // The runtime result must be a fixed array whose map is the meta map;
  // otherwise deoptimize.
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r1, ip);
  DeoptimizeIf(ne, instr->environment());
  __ bind(&use_cache);
}
5086
5087
// Loads the for-in enum cache array for the given map into the result
// register, deoptimizing if the cache entry is absent (zero).
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
  Register map = ToRegister(instr->map());
  Register result = ToRegister(instr->result());
  // The enum cache lives in the map's instance descriptors.
  __ LoadInstanceDescriptors(map, result);
  __ ldr(result,
         FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
  __ ldr(result,
         FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
  // A zero entry means there is no cache for this map.
  __ cmp(result, Operand(0));
  DeoptimizeIf(eq, instr->environment());
}
5099
5100
// Deoptimizes unless the object's map equals the expected map value.
void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  Register map = ToRegister(instr->map());
  __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
  __ cmp(map, scratch0());
  DeoptimizeIf(ne, instr->environment());
}
5108
5109
// Loads an object field by index. A non-negative index addresses an
// in-object field; a negative index addresses the out-of-object properties
// backing store. The index register holds a smi (hence the combined
// untag-and-scale shift below).
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  Label out_of_object, done;
  __ cmp(index, Operand(0));
  __ b(lt, &out_of_object);

  // In-object: scale the smi index to a byte offset past the header.
  STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
  __ add(scratch, object, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize));

  __ b(&done);

  __ bind(&out_of_object);
  __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
  // Index is equal to negated out of object property index plus 1.
  __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(result, FieldMemOperand(scratch,
                                 FixedArray::kHeaderSize - kPointerSize));
  __ bind(&done);
}
Ben Murdoch257744e2011-11-30 15:57:28 +00005134
5135
Ben Murdochb0fe1622011-05-05 13:52:32 +01005136#undef __
5137
5138} } // namespace v8::internal