// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "mips/lithium-codegen-mips.h"
#include "mips/lithium-gap-resolver-mips.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


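// A CallWrapper that records a safepoint immediately after the call it
// wraps, so that the pointer values live across the call are visible to
// the GC (and, depending on the deopt mode, to lazy deoptimization).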
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) const { }

  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Z_Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
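  // Lithium code on MIPS is generated with FPU support; opening this scope
  // lets the assembler emit FPU instructions below.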
  CpuFeatures::Scope scope(FPU);

  CodeStub::GenerateFPStubs();

  // Open a frame scope to indicate that there is a frame on the stack. The
  // NONE indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::NONE);

  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack-allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // a1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // ra: Caller's pc (return address).

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). t1 is zero for method calls and non-zero for
  // function calls.
  if (!info_->is_classic_mode() || info_->is_native()) {
    Label ok;
    __ Branch(&ok, eq, t1, Operand(zero_reg));

    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ sw(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

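  // Build the standard JavaScript frame: push the return address, the
  // caller's frame pointer, the context, and the function, and leave fp
  // pointing at the saved fp slot.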
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(2 * kPointerSize));  // Adj. FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ li(a0, Operand(slots));
      __ li(a2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(a2);
      __ Subu(a0, a0, 1);
      __ Branch(&loop, ne, a0, Operand(zero_reg));
    } else {
      __ Subu(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in a1.
    __ push(a1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Context is returned in both v0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);
        // Update the write barrier. This clobbers a3 and a0.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kSaveFPRegs);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  EnsureSpaceForLazyDeopt();
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      code->Generate();
      __ jmp(code->exit());
    }
  }
  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateDeoptJumpTable() {
  // TODO(plind): not clear that this will have advantage for MIPS.
  // Skipping it for now. Raised issue #100 for this.
  Abort("Unimplemented: %s", "GenerateDeoptJumpTable");
  return false;
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
    } else if (r.IsDouble()) {
      Abort("EmitLoadRegister: Unsupported double immediate.");
    } else {
      ASSERT(r.IsTagged());
      if (literal->IsSmi()) {
        __ li(scratch, Operand(literal));
      } else {
        __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal));
      }
    }
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ lw(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                FloatRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ li(at, Operand(static_cast<int32_t>(literal->Number())));
      __ mtc1(at, flt_scratch);
      __ cvt_d_w(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    MemOperand mem_op = ToMemOperand(op);
    __ ldc1(dbl_scratch, mem_op);
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  Handle<Object> literal = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
  return literal;
}


bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsInteger32();
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
         value->Number());
  return static_cast<int32_t>(value->Number());
}


double LCodeGen::ToDouble(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  return value->Number();
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


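// Translates a Lithium stack slot index into a MemOperand relative to fp.
// A non-negative index denotes a local or spill slot below the fixed part
// of the frame; a negative index denotes an incoming parameter above the
// return address.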
MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, context,
    // and the first word of the double in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
  } else {
    // Incoming parameter. Skip the return address and the first word of
    // the double.
    return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
  }
}


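// Writes the deoptimization translation commands for an environment,
// recursing into the outer (enclosing) environments first so that the
// outermost frame comes first in the translation.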
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
    default:
      UNREACHABLE();
  }
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                                    Safepoint::DeoptMode mode) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
      if (e->frame_type() == JS_FUNCTION) {
        ++jsframe_count;
      }
    }
    Translation translation(&translations_, frame_count, jsframe_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment);
  }
}


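// Registers the environment with the deoptimizer and emits a jump,
// conditional on cc applied to src1 and src2 (or unconditional when cc is
// al), to the corresponding eager deoptimization entry.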
void LCodeGen::DeoptimizeIf(Condition cc,
                            LEnvironment* environment,
                            Register src1,
                            const Operand& src2) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on MIPS.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (FLAG_trap_on_deopt) {
    Label skip;
    if (cc != al) {
      __ Branch(&skip, NegateCondition(cc), src1, src2);
    }
    __ stop("trap_on_deopt");
    __ bind(&skip);
  }

  if (cc == al) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    // TODO(plind): The Arm port is a little different here, due to their
    // DeOpt jump table, which is not used for Mips yet.
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2);
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


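// Records a safepoint that supports lazy deoptimization, either as a plain
// safepoint or as one that also describes the registers saved around a
// runtime call.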
void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}


void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deopt_mode);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(
      pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(
      pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
}


void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }
}


void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ lw(a0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  Register scratch = scratch0();
  const Register left = ToRegister(instr->InputAt(0));
  const Register result = ToRegister(instr->result());

  Label done;

  if (instr->hydrogen()->HasPowerOf2Divisor()) {
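    // For a power-of-two divisor 2^k, the remainder is computed as
    // |dividend| & (2^k - 1), with the sign of the dividend reapplied
    // afterwards.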
    Register scratch = scratch0();
    ASSERT(!left.is(scratch));
    __ mov(scratch, left);
    int32_t p2constant = HConstant::cast(
        instr->hydrogen()->right())->Integer32Value();
    ASSERT(p2constant != 0);
    // Result always takes the sign of the dividend (left).
    p2constant = abs(p2constant);

    Label positive_dividend;
    __ Branch(USE_DELAY_SLOT, &positive_dividend, ge, left, Operand(zero_reg));
    __ subu(result, zero_reg, left);
    __ And(result, result, p2constant - 1);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
    }
    __ Branch(USE_DELAY_SLOT, &done);
    __ subu(result, zero_reg, result);
    __ bind(&positive_dividend);
    __ And(result, scratch, p2constant - 1);
  } else {
    // div runs in the background while we check for special cases.
    Register right = EmitLoadRegister(instr->InputAt(1), scratch);
    __ div(left, right);

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg));
    }

    __ Branch(USE_DELAY_SLOT, &done, ge, left, Operand(zero_reg));
    __ mfhi(result);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
    }
  }
  __ bind(&done);
}


void LCodeGen::DoDivI(LDivI* instr) {
  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register result = ToRegister(instr->result());

  // On MIPS div is asynchronous - it will run in the background while we
  // check for special cases.
  __ div(left, right);

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg));
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ Branch(&left_not_zero, ne, left, Operand(zero_reg));
    DeoptimizeIf(lt, instr->environment(), right, Operand(zero_reg));
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ Branch(&left_not_min_int, ne, left, Operand(kMinInt));
    DeoptimizeIf(eq, instr->environment(), right, Operand(-1));
    __ bind(&left_not_min_int);
  }

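  // div writes the quotient to lo and the remainder to hi. LDivI only
  // supports division with an integer result, so deoptimize if the
  // remainder is non-zero.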
  __ mfhi(result);
  DeoptimizeIf(ne, instr->environment(), result, Operand(zero_reg));
  __ mflo(result);
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());
  // Note that result may alias left.
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right_op = instr->InputAt(1);

  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  bool bailout_on_minus_zero =
      instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero);

  if (right_op->IsConstantOperand() && !can_overflow) {
    // Use optimized code for specific constants.
    int32_t constant = ToInteger32(LConstantOperand::cast(right_op));

    if (bailout_on_minus_zero && (constant < 0)) {
      // The case of a zero constant is handled separately.
      // If the constant is negative and the left operand is zero, the
      // result should be -0.
      DeoptimizeIf(eq, instr->environment(), left, Operand(zero_reg));
    }

    switch (constant) {
      case -1:
        __ Subu(result, zero_reg, left);
        break;
      case 0:
        if (bailout_on_minus_zero) {
          // If left is strictly negative and the constant is zero, the
          // result is -0. Deoptimize if required, otherwise return 0.
          DeoptimizeIf(lt, instr->environment(), left, Operand(zero_reg));
        }
        __ mov(result, zero_reg);
        break;
      case 1:
        // Nothing to do.
        __ Move(result, left);
        break;
      default:
        // Multiplying by powers of two and powers of two plus or minus
        // one can be done faster with shifted operands.
        // For other constants we emit standard code.
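        // Compute |constant| without a branch: mask is 0 for non-negative
        // constants and -1 for negative ones, so (constant + mask) ^ mask
        // negates a negative constant and leaves a non-negative one
        // unchanged.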
        int32_t mask = constant >> 31;
        uint32_t constant_abs = (constant + mask) ^ mask;

        if (IsPowerOf2(constant_abs) ||
            IsPowerOf2(constant_abs - 1) ||
            IsPowerOf2(constant_abs + 1)) {
          if (IsPowerOf2(constant_abs)) {
            int32_t shift = WhichPowerOf2(constant_abs);
            __ sll(result, left, shift);
          } else if (IsPowerOf2(constant_abs - 1)) {
            int32_t shift = WhichPowerOf2(constant_abs - 1);
            __ sll(result, left, shift);
            __ Addu(result, result, left);
          } else if (IsPowerOf2(constant_abs + 1)) {
            int32_t shift = WhichPowerOf2(constant_abs + 1);
            __ sll(result, left, shift);
            __ Subu(result, result, left);
          }

          // Correct the sign of the result if the constant is negative.
          if (constant < 0) {
            __ Subu(result, zero_reg, result);
          }

        } else {
          // Generate standard code.
          __ li(at, constant);
          __ mul(result, left, at);
        }
    }

  } else {
    Register right = EmitLoadRegister(right_op, scratch);
    if (bailout_on_minus_zero) {
      __ Or(ToRegister(instr->TempAt(0)), left, right);
    }

    if (can_overflow) {
      // hi:lo = left * right.
      __ mult(left, right);
      __ mfhi(scratch);
      __ mflo(result);
      __ sra(at, result, 31);
      DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));
    } else {
      __ mul(result, left, right);
    }

    if (bailout_on_minus_zero) {
      // Bail out if the result is supposed to be negative zero.
      Label done;
      __ Branch(&done, ne, result, Operand(zero_reg));
      DeoptimizeIf(lt,
                   instr->environment(),
                   ToRegister(instr->TempAt(0)),
                   Operand(zero_reg));
      __ bind(&done);
    }
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left_op = instr->InputAt(0);
  LOperand* right_op = instr->InputAt(1);
  ASSERT(left_op->IsRegister());
  Register left = ToRegister(left_op);
  Register result = ToRegister(instr->result());
  Operand right(no_reg);

  if (right_op->IsStackSlot() || right_op->IsArgument()) {
    right = Operand(EmitLoadRegister(right_op, at));
  } else {
    ASSERT(right_op->IsRegister() || right_op->IsConstantOperand());
    right = ToOperand(right_op);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_OR:
      __ Or(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Xor(result, left, right);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  // Both 'left' and 'right' are "used at start" (see LCodeGen::DoShift), so
  // result may alias either of them.
  LOperand* right_op = instr->InputAt(1);
  Register left = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (right_op->IsRegister()) {
    // No need to mask the right operand on MIPS, it is built into the variable
    // shift instructions.
    switch (instr->op()) {
      case Token::SAR:
        __ srav(result, left, ToRegister(right_op));
        break;
      case Token::SHR:
        __ srlv(result, left, ToRegister(right_op));
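        // The unsigned result of srlv is negative when reinterpreted as an
        // int32 only if the shift amount was zero and the input had its
        // sign bit set; such a value is not representable as a tagged
        // int32, so deoptimize.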
        if (instr->can_deopt()) {
          DeoptimizeIf(lt, instr->environment(), result, Operand(zero_reg));
        }
        break;
      case Token::SHL:
        __ sllv(result, left, ToRegister(right_op));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Mask the right_op operand.
    int value = ToInteger32(LConstantOperand::cast(right_op));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sra(result, left, shift_count);
        } else {
          __ Move(result, left);
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ srl(result, left, shift_count);
        } else {
          if (instr->can_deopt()) {
            __ And(at, left, Operand(0x80000000));
            DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
          }
          __ Move(result, left);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ sll(result, left, shift_count);
        } else {
          __ Move(result, left);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);

  if (!can_overflow) {
    if (right->IsStackSlot() || right->IsArgument()) {
      Register right_reg = EmitLoadRegister(right, at);
      __ Subu(ToRegister(result), ToRegister(left), Operand(right_reg));
    } else {
      ASSERT(right->IsRegister() || right->IsConstantOperand());
      __ Subu(ToRegister(result), ToRegister(left), ToOperand(right));
    }
  } else {  // can_overflow.
    Register overflow = scratch0();
    Register scratch = scratch1();
    if (right->IsStackSlot() ||
        right->IsArgument() ||
        right->IsConstantOperand()) {
      Register right_reg = EmitLoadRegister(right, scratch);
      __ SubuAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 right_reg,
                                 overflow);  // Reg at also used as scratch.
    } else {
      ASSERT(right->IsRegister());
      // Due to overflow check macros not supporting constant operands,
      // the IsConstantOperand case is handled by the clause above.
      __ SubuAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 ToRegister(right),
                                 overflow);  // Reg at also used as scratch.
    }
    DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg));
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ li(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DoubleRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ Move(result, v);
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  Handle<Object> value = instr->value();
  if (value->IsSmi()) {
    __ li(ToRegister(instr->result()), Operand(value));
  } else {
    __ LoadHeapObject(ToRegister(instr->result()),
                      Handle<HeapObject>::cast(value));
  }
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ lw(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ lw(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}


void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ lw(result, FieldMemOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ lbu(result, FieldMemOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ Ext(result, result, Map::kElementsKindShift, Map::kElementsKindBitCount);
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  Label done;

  // If the object is a smi return the object.
  __ Move(result, input);
  __ JumpIfSmi(input, &done);

  // If the object is not a value type, return the object.
  __ GetObjectType(input, map, map);
  __ Branch(&done, ne, map, Operand(JS_VALUE_TYPE));
  __ lw(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ Nor(result, zero_reg, Operand(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), at);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);

  if (!can_overflow) {
    if (right->IsStackSlot() || right->IsArgument()) {
      Register right_reg = EmitLoadRegister(right, at);
      __ Addu(ToRegister(result), ToRegister(left), Operand(right_reg));
    } else {
      ASSERT(right->IsRegister() || right->IsConstantOperand());
      __ Addu(ToRegister(result), ToRegister(left), ToOperand(right));
    }
  } else {  // can_overflow.
    Register overflow = scratch0();
    Register scratch = scratch1();
    if (right->IsStackSlot() ||
        right->IsArgument() ||
        right->IsConstantOperand()) {
      Register right_reg = EmitLoadRegister(right, scratch);
      __ AdduAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 right_reg,
                                 overflow);  // Reg at also used as scratch.
    } else {
      ASSERT(right->IsRegister());
      // Due to overflow check macros not supporting constant operands,
      // the IsConstantOperand case is handled by the clause above.
      __ AdduAndCheckForOverflow(ToRegister(result),
                                 ToRegister(left),
                                 ToRegister(right),
                                 overflow);  // Reg at also used as scratch.
    }
    DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg));
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  DoubleRegister result = ToDoubleRegister(instr->result());
  switch (instr->op()) {
    case Token::ADD:
      __ add_d(result, left, right);
      break;
    case Token::SUB:
      __ sub_d(result, left, right);
      break;
    case Token::MUL:
      __ mul_d(result, left, right);
      break;
    case Token::DIV:
      __ div_d(result, left, right);
      break;
    case Token::MOD: {
      // Save a0-a3 on the stack.
      RegList saved_regs = a0.bit() | a1.bit() | a2.bit() | a3.bit();
      __ MultiPush(saved_regs);

      __ PrepareCallCFunction(0, 2, scratch0());
      __ SetCallCDoubleArguments(left, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          0, 2);
      // Move the result into the double result register.
      __ GetCFunctionDoubleResult(result);

      // Restore the saved registers.
      __ MultiPop(saved_regs);
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(a1));
  ASSERT(ToRegister(instr->InputAt(1)).is(a0));
  ASSERT(ToRegister(instr->result()).is(v0));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  // Other architectures use a nop here, to signal that there is no inlined
  // patchable code. MIPS does not need the nop, since our marker
  // instruction (andi zero_reg) will never be used in normal code.
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


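// Emits the control flow for a two-way branch. If one of the successor
// blocks is the next block in emission order, only a single conditional
// branch is needed and the other edge falls through.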
void LCodeGen::EmitBranch(int left_block, int right_block,
                          Condition cc, Register src1, const Operand& src2) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);
  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ Branch(chunk_->GetAssemblyLabel(right_block),
              NegateCondition(cc), src1, src2);
  } else if (right_block == next_block) {
    __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
  } else {
    __ Branch(chunk_->GetAssemblyLabel(left_block), cc, src1, src2);
    __ Branch(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::EmitBranchF(int left_block, int right_block,
                           Condition cc, FPURegister src1, FPURegister src2) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);
  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ BranchF(chunk_->GetAssemblyLabel(right_block), NULL,
               NegateCondition(cc), src1, src2);
  } else if (right_block == next_block) {
    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
  } else {
    __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, cc, src1, src2);
    __ Branch(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    EmitBranch(true_block, false_block, ne, reg, Operand(zero_reg));
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    // Test the double value. Zero and NaN are false.
    EmitBranchF(true_block, false_block, ne, reg, kDoubleRegZero);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      __ LoadRoot(at, Heap::kTrueValueRootIndex);
      EmitBranch(true_block, false_block, eq, reg, Operand(at));
    } else if (type.IsSmi()) {
      EmitBranch(true_block, false_block, ne, reg, Operand(zero_reg));
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
        __ Branch(false_label, eq, reg, Operand(at));
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // Boolean -> its value.
        __ LoadRoot(at, Heap::kTrueValueRootIndex);
        __ Branch(true_label, eq, reg, Operand(at));
        __ LoadRoot(at, Heap::kFalseValueRootIndex);
        __ Branch(false_label, eq, reg, Operand(at));
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ LoadRoot(at, Heap::kNullValueRootIndex);
        __ Branch(false_label, eq, reg, Operand(at));
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ Branch(false_label, eq, reg, Operand(zero_reg));
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ And(at, reg, Operand(kSmiTagMask));
        DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
      }

      const Register map = scratch0();
      if (expected.NeedsMap()) {
        __ lw(map, FieldMemOperand(reg, HeapObject::kMapOffset));
        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
          __ And(at, at, Operand(1 << Map::kIsUndetectable));
          __ Branch(false_label, ne, at, Operand(zero_reg));
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // spec object -> true.
        __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
        __ Branch(true_label, ge, at, Operand(FIRST_SPEC_OBJECT_TYPE));
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
        __ Branch(&not_string, ge, at, Operand(FIRST_NONSTRING_TYPE));
        __ lw(at, FieldMemOperand(reg, String::kLengthOffset));
        __ Branch(true_label, ne, at, Operand(zero_reg));
        __ Branch(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        DoubleRegister dbl_scratch = double_scratch0();
        Label not_heap_number;
        __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
        __ Branch(&not_heap_number, ne, map, Operand(at));
        __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
        __ BranchF(true_label, false_label, ne, dbl_scratch, kDoubleRegZero);
        // Falls through if dbl_scratch == 0.
        __ Branch(false_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg));
    }
  }
}


void LCodeGen::EmitGoto(int block) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    __ jmp(chunk_->GetAssemblyLabel(block));
  }
}


void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = kNoCondition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  Condition cond = TokenToCondition(instr->op(), false);

  if (left->IsConstantOperand() && right->IsConstantOperand()) {
    // We can statically evaluate the comparison.
    double left_val = ToDouble(LConstantOperand::cast(left));
    double right_val = ToDouble(LConstantOperand::cast(right));
    int next_block =
        EvalComparison(instr->op(), left_val, right_val) ? true_block
                                                         : false_block;
    EmitGoto(next_block);
  } else {
    if (instr->is_double()) {
      // Compare left and right as doubles and load the
      // resulting flags into the normal status register.
      FPURegister left_reg = ToDoubleRegister(left);
      FPURegister right_reg = ToDoubleRegister(right);

      // If a NaN is involved, i.e. the result is unordered,
      // jump to false block label.
      __ BranchF(NULL, chunk_->GetAssemblyLabel(false_block), eq,
                 left_reg, right_reg);

      EmitBranchF(true_block, false_block, cond, left_reg, right_reg);
    } else {
      Register cmp_left;
      Operand cmp_right = Operand(0);

      if (right->IsConstantOperand()) {
        cmp_left = ToRegister(left);
        cmp_right = Operand(ToInteger32(LConstantOperand::cast(right)));
      } else if (left->IsConstantOperand()) {
        cmp_left = ToRegister(right);
        cmp_right = Operand(ToInteger32(LConstantOperand::cast(left)));
        // We transposed the operands. Reverse the condition.
        cond = ReverseCondition(cond);
      } else {
        cmp_left = ToRegister(left);
        cmp_right = Operand(ToRegister(right));
      }

      EmitBranch(true_block, false_block, cond, cmp_left, cmp_right);
    }
  }
}


void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  EmitBranch(true_block, false_block, eq, left, Operand(right));
}


void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitBranch(true_block, false_block, eq, left,
             Operand(instr->hydrogen()->right()));
}


void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));
  int false_block = chunk_->LookupDestination(instr->false_block_id());

1647 // If the expression is known to be untagged or a smi, then it's definitely
1648 // not null, and it can't be a an undetectable object.
1649 if (instr->hydrogen()->representation().IsSpecialization() ||
1650 instr->hydrogen()->type().IsSmi()) {
1651 EmitGoto(false_block);
1652 return;
1653 }
1654
1655 int true_block = chunk_->LookupDestination(instr->true_block_id());
1656
1657 Heap::RootListIndex nil_value = instr->nil() == kNullValue ?
1658 Heap::kNullValueRootIndex :
1659 Heap::kUndefinedValueRootIndex;
1660 __ LoadRoot(at, nil_value);
1661 if (instr->kind() == kStrictEquality) {
1662 EmitBranch(true_block, false_block, eq, reg, Operand(at));
1663 } else {
1664 Heap::RootListIndex other_nil_value = instr->nil() == kNullValue ?
1665 Heap::kUndefinedValueRootIndex :
1666 Heap::kNullValueRootIndex;
1667 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1668 Label* false_label = chunk_->GetAssemblyLabel(false_block);
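    // The instruction emitted right after a Branch(USE_DELAY_SLOT, ...)
    // executes in the MIPS branch delay slot, i.e. whether or not the branch
    // is taken; both fills below are safe because they only prepare state
    // for the fall-through path.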
    __ Branch(USE_DELAY_SLOT, true_label, eq, reg, Operand(at));
    __ LoadRoot(at, other_nil_value);  // In the delay slot.
    __ Branch(USE_DELAY_SLOT, true_label, eq, reg, Operand(at));
    __ JumpIfSmi(reg, false_label);  // In the delay slot.
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ lbu(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ And(scratch, scratch, 1 << Map::kIsUndetectable);
    EmitBranch(true_block, false_block, ne, scratch, Operand(zero_reg));
  }
}


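// Tests whether input is a JS object. Branches to is_not_object or is_object
// for the cases decided here; otherwise falls through with the instance type
// in temp2 and returns the condition (le against
// LAST_NONCALLABLE_SPEC_OBJECT_TYPE) the caller must branch on.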
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  __ LoadRoot(temp2, Heap::kNullValueRootIndex);
  __ Branch(is_object, eq, input, Operand(temp2));

  // Load map.
  __ lw(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ lbu(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ And(temp2, temp2, Operand(1 << Map::kIsUndetectable));
  __ Branch(is_not_object, ne, temp2, Operand(zero_reg));

  // Load instance type and check that it is in object type range.
  __ lbu(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ Branch(is_not_object,
            lt, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));

  return le;
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond, temp2,
             Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
}


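// Tests whether input is a string. Branches to is_not_string for smis;
// otherwise leaves the instance type in temp1 and returns the condition
// (lt against FIRST_NONSTRING_TYPE) the caller must branch on.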
Condition LCodeGen::EmitIsString(Register input,
                                 Register temp1,
                                 Label* is_not_string) {
  __ JumpIfSmi(input, is_not_string);
  __ GetObjectType(input, temp1, temp1);

  return lt;
}


void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsString(reg, temp1, false_label);

  EmitBranch(true_block, false_block, true_cond, temp1,
             Operand(FIRST_NONSTRING_TYPE));
}


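// Smis carry a zero tag bit (kSmiTag == 0), so ANDing with kSmiTagMask
// yields zero exactly for smis.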
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), at);
  __ And(at, input_reg, kSmiTagMask);
  EmitBranch(true_block, false_block, eq, at, Operand(zero_reg));
}


void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
  __ lw(temp, FieldMemOperand(input, HeapObject::kMapOffset));
  __ lbu(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
  __ And(at, temp, Operand(1 << Map::kIsUndetectable));
  EmitBranch(true_block, false_block, ne, at, Operand(zero_reg));
}


static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);

  EmitBranch(true_block, false_block, condition, v0, Operand(zero_reg));
}


static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ JumpIfSmi(input, false_label);

  __ GetObjectType(input, scratch, scratch);
  EmitBranch(true_block,
             false_block,
             BranchCondition(instr->hydrogen()),
             scratch,
             Operand(TestType(instr->hydrogen())));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ lw(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ lw(scratch,
        FieldMemOperand(input, String::kHashFieldOffset));
  __ And(at, scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq, at, Operand(zero_reg));
}


// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!input.is(temp2));
  ASSERT(!temp.is(temp2));

  __ JumpIfSmi(input, is_false);

  if (class_name->IsEqualTo(CStrVector("Function"))) {
    // Assuming the following assertions, we can use the same compares to test
    // for both being a function type and being in the object type range.
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  FIRST_SPEC_OBJECT_TYPE + 1);
    STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  LAST_SPEC_OBJECT_TYPE - 1);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);

    __ GetObjectType(input, temp, temp2);
    __ Branch(is_false, lt, temp2, Operand(FIRST_SPEC_OBJECT_TYPE));
    __ Branch(is_true, eq, temp2, Operand(FIRST_SPEC_OBJECT_TYPE));
    __ Branch(is_true, eq, temp2, Operand(LAST_SPEC_OBJECT_TYPE));
  } else {
    // Faster code path to avoid two compares: subtract lower bound from the
    // actual type and do a signed compare with the width of the type range.
    __ GetObjectType(input, temp, temp2);
    __ Subu(temp2, temp2, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ Branch(is_false, gt, temp2, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
                                           FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  }

  // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
  // Check if the constructor in the map is a function.
  __ lw(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // Objects with a non-function constructor have class 'Object'.
  __ GetObjectType(temp, temp2, temp2);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ Branch(is_true, ne, temp2, Operand(JS_FUNCTION_TYPE));
  } else {
    __ Branch(is_false, ne, temp2, Operand(JS_FUNCTION_TYPE));
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ lw(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ lw(temp, FieldMemOperand(temp,
                              SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.

  // End with the address of this class_name instance in the temp register.
  // On MIPS, the caller must do the comparison with Handle<String> class_name.
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq, temp, Operand(class_name));
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ lw(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  EmitBranch(true_block, false_block, eq, temp, Operand(instr->map()));
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  Label true_label, done;
  ASSERT(ToRegister(instr->InputAt(0)).is(a0));  // Object is in a0.
  ASSERT(ToRegister(instr->InputAt(1)).is(a1));  // Function is in a1.
  Register result = ToRegister(instr->result());
  ASSERT(result.is(v0));

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

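  // The stub leaves its result in v0: zero when the object is an instance of
  // the function, non-zero otherwise.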
  __ Branch(&true_label, eq, result, Operand(zero_reg));
  __ li(result, Operand(factory()->false_value()));
  __ Branch(&done);
  __ bind(&true_label);
  __ li(result, Operand(factory()->true_value()));
  __ bind(&done);
}


void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
    }
    virtual LInstruction* instr() { return instr_; }
    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(a0));
  ASSERT(result.is(v0));

  // A Smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset));

  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  Handle<JSGlobalPropertyCell> cell =
      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
  __ li(at, Operand(Handle<Object>(cell)));
  __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
  __ Branch(&cache_miss, ne, map, Operand(at));
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ li(result, Operand(factory()->the_hole_value()), true);
  __ Branch(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not an instance of anything.
  __ LoadRoot(temp, Heap::kNullValueRootIndex);
  __ Branch(&false_result, eq, object, Operand(temp));

  // A string value is not an instance of anything.
  Condition cc = __ IsObjectStringType(object, temp, temp);
  __ Branch(&false_result, cc, temp, Operand(zero_reg));

  // Go to the deferred code.
  __ Branch(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                               Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(v0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be t0 as
  // its slot of the pushing of safepoint registers is used to communicate the
  // offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(t0));
  __ LoadHeapObject(InstanceofStub::right(), instr->function());
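  // kAdditionalDelta appears to cover the instructions emitted between this
  // point and the stub call's return address (the delta load and safepoint
  // store below plus the call sequence), so that delta measures, in
  // instructions, from the patched map check to the return address.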
  static const int kAdditionalDelta = 7;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ li(temp, Operand(delta * kPointerSize), true);
    __ StoreToSafepointRegisterSlot(temp, temp);
  }
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasDeoptimizationEnvironment());
  LEnvironment* env = instr->deoptimization_environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // On MIPS there is no need for a "no inlined smi code" marker (nop).

  Condition condition = ComputeCompareCondition(op);
  // A minor optimization that relies on LoadRoot always emitting one
  // instruction.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
  Label done;
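  // If the condition holds, the delay slot loads the true value and the
  // branch skips the false load; otherwise both loads execute and the false
  // value overwrites the true one.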
  __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg));
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  ASSERT_EQ(3, masm()->InstructionsGeneratedSince(&done));
  __ bind(&done);
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in v0.
    __ push(v0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
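  // sp_delta covers the parameters plus one extra slot for the receiver.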
  int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ Pop(ra, fp);
  __ Addu(sp, sp, Operand(sp_delta));
  __ Jump(ra);
}


void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ lw(result, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(eq, instr->environment(), result, Operand(at));
  }
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(a0));
  ASSERT(ToRegister(instr->result()).is(v0));

  __ li(a2, Operand(instr->name()));
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->value());
  Register cell = scratch0();

  // Load the cell.
  __ li(cell, Operand(instr->hydrogen()->cell()));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    // We use a temp to check the payload.
    Register payload = ToRegister(instr->TempAt(0));
    __ lw(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(eq, instr->environment(), payload, Operand(at));
  }

  // Store the value.
  __ sw(value, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
  // Cells are always rescanned, so no write barrier here.
}


void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(a1));
  ASSERT(ToRegister(instr->value()).is(a0));

  __ li(a2, Operand(instr->name()));
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());

  __ lw(result, ContextOperand(context, instr->slot_index()));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(eq, instr->environment(), result, Operand(at));
    } else {
      Label is_not_hole;
      __ Branch(&is_not_hole, ne, result, Operand(at));
      __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
      __ bind(&is_not_hole);
    }
  }
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  MemOperand target = ContextOperand(context, instr->slot_index());

  Label skip_assignment;

  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ lw(scratch, target);
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);

    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(eq, instr->environment(), scratch, Operand(at));
    } else {
      __ Branch(&skip_assignment, ne, scratch, Operand(at));
    }
  }

  __ sw(value, target);
  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    __ RecordWriteContextSlot(context,
                              target.offset(),
                              value,
                              scratch0(),
                              kRAHasBeenSaved,
                              kSaveFPRegs,
                              EMIT_REMEMBERED_SET,
                              check_needed);
  }

  __ bind(&skip_assignment);
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ lw(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ lw(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup(isolate());
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsFound() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ lw(result, FieldMemOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
      __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    __ LoadHeapObject(result, function);
  }
}


void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    ASSERT(instr->hydrogen()->need_generic());
    __ li(a2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ Branch(&next, ne, scratch, Operand(map));
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ Branch(&done);
      __ bind(&next);
    }
    Handle<Map> map = instr->hydrogen()->types()->last();
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ Branch(&generic, ne, scratch, Operand(map));
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ Branch(&done);
      __ bind(&generic);
      __ li(a2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment(), scratch, Operand(map));
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(a0));
  ASSERT(ToRegister(instr->result()).is(v0));

  // Name is always in a2.
  __ li(a2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ GetObjectType(function, result, scratch);
  DeoptimizeIf(ne, instr->environment(), scratch, Operand(JS_FUNCTION_TYPE));

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ Branch(&non_instance, ne, scratch, Operand(zero_reg));

  // Get the prototype or initial map from the function.
  __ lw(result,
        FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(eq, instr->environment(), result, Operand(at));

  // If the function does not have an initial map, we're done.
  Label done;
  __ GetObjectType(result, scratch, scratch);
  __ Branch(&done, ne, scratch, Operand(MAP_TYPE));

  // Get the prototype from the initial map.
  __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ Branch(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ lw(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ lw(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, fail;
    __ lw(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(USE_DELAY_SLOT, &done, eq, scratch, Operand(at));
    __ LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);  // In the delay slot.
    __ Branch(&done, eq, scratch, Operand(at));
    // |scratch| still contains |input|'s map.
    __ lbu(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
    __ Ext(scratch, scratch, Map::kElementsKindShift,
           Map::kElementsKindBitCount);
    __ Branch(&done, eq, scratch,
              Operand(FAST_ELEMENTS));
    __ Branch(&fail, lt, scratch,
              Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ Branch(&done, le, scratch,
              Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed.");
    __ bind(&done);
  }
}


void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ lw(to_reg, FieldMemOperand(from_reg,
                                ExternalArray::kExternalPointerOffset));
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bail out if the index is not a valid argument index. Using an unsigned
  // check gets the negative check for free.

  // TODO(plind): Should be optimized to do the sub before the DeoptimizeIf(),
  // as is done on ARM. It will save us an instruction.
  DeoptimizeIf(ls, instr->environment(), length, Operand(index));

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them, add one more.
  __ subu(length, length, index);
  __ Addu(length, length, Operand(1));
  __ sll(length, length, kPointerSizeLog2);
  __ Addu(at, arguments, Operand(length));
  __ lw(result, MemOperand(at, 0));
}


void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Load the result.
  __ sll(scratch, key, kPointerSizeLog2);  // Key indexes words.
  __ addu(scratch, elements, scratch);
  __ lw(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
  }
}


void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  Register elements = ToRegister(instr->elements());
  bool key_is_constant = instr->key()->IsConstantOperand();
  Register key = no_reg;
  DoubleRegister result = ToDoubleRegister(instr->result());
  Register scratch = scratch0();

  int shift_size =
      ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }

  if (key_is_constant) {
    __ Addu(elements, elements, Operand(constant_key * (1 << shift_size) +
            FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  } else {
    __ sll(scratch, key, shift_size);
    __ Addu(elements, elements, Operand(scratch));
    __ Addu(elements, elements,
            Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  }

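  // The hole is encoded as a NaN with a distinguished bit pattern, so
  // comparing the upper word of the element (at offset
  // sizeof(kHoleNanLower32)) against kHoleNanUpper32 is enough to detect it
  // before loading the double.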
  __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
  DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));

  __ ldc1(result, MemOperand(elements));
}


void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    FPURegister result = ToDoubleRegister(instr->result());
    if (key_is_constant) {
      __ Addu(scratch0(), external_pointer, constant_key * (1 << shift_size));
    } else {
      __ sll(scratch0(), key, shift_size);
      __ Addu(scratch0(), scratch0(), external_pointer);
    }

    if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
      __ lwc1(result, MemOperand(scratch0()));
      __ cvt_d_s(result, result);
    } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
      __ ldc1(result, MemOperand(scratch0()));
    }
  } else {
    Register result = ToRegister(instr->result());
    Register scratch = scratch0();
    MemOperand mem_operand(zero_reg);
    if (key_is_constant) {
      mem_operand = MemOperand(external_pointer,
                               constant_key * (1 << shift_size));
    } else {
      __ sll(scratch, key, shift_size);
      __ Addu(scratch, scratch, external_pointer);
      mem_operand = MemOperand(scratch);
    }
    switch (elements_kind) {
      case EXTERNAL_BYTE_ELEMENTS:
        __ lb(result, mem_operand);
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ lbu(result, mem_operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
        __ lh(result, mem_operand);
        break;
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ lhu(result, mem_operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
        __ lw(result, mem_operand);
        break;
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ lw(result, mem_operand);
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
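        // Deopt if the loaded value has its top bit set: such a value has no
        // representation as a non-negative int32.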
        DeoptimizeIf(Ugreater_equal, instr->environment(),
                     result, Operand(0x80000000));
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_SMI_ONLY_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(a1));
  ASSERT(ToRegister(instr->key()).is(a0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register temp = scratch1();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  Label done, adapted;
  __ lw(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ Xor(temp, result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

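  // temp is now zero iff the calling frame is an arguments adaptor frame,
  // which lets the conditional moves below select the result without a
  // branch.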
  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ movn(result, fp, temp);  // Move only if temp is not equal to zero (ne).
  __ movz(result, scratch, temp);  // Move only if temp is equal to zero (eq).
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ Addu(result, zero_reg, Operand(scope()->num_parameters()));
  __ Branch(&done, eq, fp, Operand(elem));

  // Arguments adaptor frame present. Get argument length from there.
  __ lw(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(result,
        MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}


void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(a0));  // Used for parameter count.
  ASSERT(function.is(a1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(v0));

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions.
  __ lw(scratch,
        FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ lw(scratch,
        FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));

  // Do not transform the receiver to object for builtins.
  int32_t strict_mode_function_mask =
      1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
  int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
  __ And(scratch, scratch, Operand(strict_mode_function_mask | native_mask));
  __ Branch(&receiver_ok, ne, scratch, Operand(zero_reg));

  // Normal function. Replace undefined or null with global receiver.
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ Branch(&global_object, eq, receiver, Operand(scratch));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ Branch(&global_object, eq, receiver, Operand(scratch));

  // Deoptimize if the receiver is not a JS object.
  __ And(scratch, receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment(), scratch, Operand(zero_reg));

  __ GetObjectType(receiver, scratch, scratch);
  DeoptimizeIf(lt, instr->environment(),
               scratch, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ Branch(&receiver_ok);

  __ bind(&global_object);
  __ lw(receiver, GlobalObjectOperand());
  __ lw(receiver,
        FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  DeoptimizeIf(hi, instr->environment(), length, Operand(kArgumentsLimit));

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ Move(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ Addu(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.
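  // scratch holds length * kPointerSize, the byte offset of the next argument
  // to copy; the shifts in the branch delay slots keep it in sync as length
  // is decremented.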
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg));
  __ sll(scratch, length, 2);
  __ bind(&loop);
  __ Addu(scratch, elements, scratch);
  __ lw(scratch, MemOperand(scratch));
  __ push(scratch);
  __ Subu(length, length, Operand(1));
  __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg));
  __ sll(scratch, length, 2);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  // The number of arguments is stored in receiver which is a0, as expected
  // by InvokeFunction.
  ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort("DoPushArgument not implemented for double type.");
  } else {
    Register argument_reg = EmitLoadRegister(argument, at);
    __ push(argument_reg);
  }
}


void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ LoadHeapObject(result, instr->hydrogen()->closure());
}


void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, cp);
}


void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ lw(result,
        MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
}


void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
  __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
  // The context is the first argument.
  __ Push(cp, scratch0(), scratch1());
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}


void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ lw(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}


void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ lw(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}


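// Invokes a known function. When no arguments adaptation is needed, the call
// jumps straight through the function's code entry; otherwise it falls back
// to the generic InvokeFunction path, which goes through the arguments
// adaptor.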
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
      function->shared()->formal_parameter_count() == arity;

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  if (can_invoke_directly) {
    __ LoadHeapObject(a1, function);
    // Change context if needed.
    bool change_context =
        (info()->closure()->context() != function->context()) ||
        scope()->contains_with() ||
        (scope()->num_heap_slots() > 0);
    if (change_context) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    }

    // Set a0 to the arguments count if adaptation is not needed. Assumes
    // that a0 is available to write to at this point.
    if (!function->NeedsArgumentsAdaption()) {
      __ li(a0, Operand(arity));
    }

    // Invoke function.
    __ SetCallKind(t1, call_kind);
    __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Call(at);

    // Set up deoptimization.
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
  } else {
    SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);
    __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
  }

  // Restore context.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));
  __ mov(a0, v0);
  CallKnownFunction(instr->function(), instr->arity(), instr, CALL_AS_METHOD);
}


void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));

  Label done;
  Register exponent = scratch0();
  scratch = no_reg;
  __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it.
  __ Move(result, input);
  __ And(at, exponent, Operand(HeapNumber::kSignMask));
  __ Branch(&done, eq, at, Operand(zero_reg));

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(a1) ? a0 : a1;
    Register tmp2 = input.is(a2) ? a0 : a2;
    Register tmp3 = input.is(a3) ? a0 : a3;
    Register tmp4 = input.is(t0) ? a0 : t0;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ Branch(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(v0))
      __ mov(tmp1, v0);
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
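    // Clearing the sign bit in the high word yields the absolute value; the
    // mantissa word is copied over unchanged.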
    __ And(exponent, exponent, Operand(~HeapNumber::kSignMask));
    __ sw(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ lw(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ sw(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    __ StoreToSafepointRegisterSlot(tmp1, result);
  }

  __ bind(&done);
}


void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Label done;
  __ Branch(USE_DELAY_SLOT, &done, ge, input, Operand(zero_reg));
  __ mov(result, input);
  ASSERT_EQ(2, masm()->InstructionsGeneratedSince(&done));
  __ subu(result, zero_reg, input);
  // Overflow if result is still negative, i.e. 0x80000000.
  DeoptimizeIf(lt, instr->environment(), result, Operand(zero_reg));
  __ bind(&done);
}


void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
    virtual LInstruction* instr() { return instr_; }
   private:
    LUnaryMathOperation* instr_;
  };

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    FPURegister input = ToDoubleRegister(instr->InputAt(0));
    FPURegister result = ToDoubleRegister(instr->result());
    __ abs_d(result, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}


void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  FPURegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register except_flag = ToRegister(instr->TempAt(0));

  __ EmitFPUTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     except_flag);

  // Deopt if the operation did not succeed.
  DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));

  // Load the result.
  __ mfc1(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    Label done;
    __ Branch(&done, ne, result, Operand(zero_reg));
    __ mfc1(scratch1, input.high());
    __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment(), scratch1, Operand(zero_reg));
    __ bind(&done);
  }
}


void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  Label done, check_sign_on_zero;

  // Extract exponent bits.
  __ mfc1(result, input.high());
  __ Ext(scratch,
         result,
         HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
3011 Label skip1;
3012 __ Branch(&skip1, gt, scratch, Operand(HeapNumber::kExponentBias - 2));
3013 __ mov(result, zero_reg);
3014 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3015 __ Branch(&check_sign_on_zero);
3016 } else {
3017 __ Branch(&done);
3018 }
3019 __ bind(&skip1);
3020
3021 // The following conversion will not work with numbers
3022 // outside of ]-2^32, 2^32[.
3023 DeoptimizeIf(ge, instr->environment(), scratch,
3024 Operand(HeapNumber::kExponentBias + 32));
3025
3026 // Save the original sign for later comparison.
3027 __ And(scratch, result, Operand(HeapNumber::kSignMask));
3028
3029 __ Move(double_scratch0(), 0.5);
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003030 __ add_d(double_scratch0(), input, double_scratch0());
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00003031
3032 // Check sign of the result: if the sign changed, the input
3033 // value was in ]0.5, 0[ and the result should be -0.
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003034 __ mfc1(result, double_scratch0().high());
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00003035 __ Xor(result, result, Operand(scratch));
3036 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3037 // ARM uses 'mi' here, which is 'lt'
3038 DeoptimizeIf(lt, instr->environment(), result,
3039 Operand(zero_reg));
3040 } else {
3041 Label skip2;
3042 // ARM uses 'mi' here, which is 'lt'
3043 // Negating it results in 'ge'
3044 __ Branch(&skip2, ge, result, Operand(zero_reg));
3045 __ mov(result, zero_reg);
3046 __ Branch(&done);
3047 __ bind(&skip2);
3048 }
3049
3050 Register except_flag = scratch;
3051
3052 __ EmitFPUTruncate(kRoundToMinusInf,
3053 double_scratch0().low(),
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00003054 double_scratch0(),
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00003055 result,
3056 except_flag);
3057
3058 DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));
3059
3060 __ mfc1(result, double_scratch0().low());
3061
3062 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3063 // Test for -0.
3064 __ Branch(&done, ne, result, Operand(zero_reg));
3065 __ bind(&check_sign_on_zero);
3066 __ mfc1(scratch, input.high());
3067 __ And(scratch, scratch, Operand(HeapNumber::kSignMask));
3068 DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
3069 }
3070 __ bind(&done);
3071}
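
// DoMathRound implements Math.round as floor(x + 0.5) on the FPU, with a fast
// path for |x| < 0.5 and deopts where the trick cannot work (|x| >= 2^32, and
// -0 when the caller must observe it). A rough sketch of the intended
// ECMA-262 15.8.2.15 semantics (illustration only):
//
//   double math_round(double x) {
//     if (-0.5 <= x && x < 0.5) return copysign(0.0, x);  // +/-0 fast path
//     return floor(x + 0.5);
//   }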


void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister result = ToDoubleRegister(instr->result());
  __ sqrt_d(result, input);
}


void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister result = ToDoubleRegister(instr->result());
  DoubleRegister temp = ToDoubleRegister(instr->TempAt(0));

  ASSERT(!input.is(result));

  // Note that according to ECMA-262 15.8.2.13:
  // Math.pow(-Infinity, 0.5) == Infinity
  // Math.sqrt(-Infinity) == NaN
  Label done;
  __ Move(temp, -V8_INFINITY);
  __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, temp, input);
  // Set up Infinity in the delay slot.
  // result is overwritten if the branch is not taken.
  __ neg_d(result, temp);

  // Add +0 to convert -0 to +0.
  __ add_d(result, input, kDoubleRegZero);
  __ sqrt_d(result, result);
  __ bind(&done);
}
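
// Math.pow(x, 0.5) cannot be lowered to a plain sqrt: ECMA-262 15.8.2.13
// requires pow(-Infinity, 0.5) == Infinity while sqrt(-Infinity) is NaN, and
// adding +0 first turns a -0 input into +0. A rough C sketch (illustration
// only, not part of the generated code):
//
//   double pow_half(double x) {
//     if (x == -INFINITY) return INFINITY;
//     return sqrt(x + 0.0);  // -0 + 0.0 == +0
//   }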


void LCodeGen::DoPower(LPower* instr) {
  Representation exponent_type = instr->hydrogen()->right()->representation();
  // Having marked this as a call, we can use any registers.
  // Just make sure that the input/output registers are the expected ones.
  ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
         ToDoubleRegister(instr->InputAt(1)).is(f4));
  ASSERT(!instr->InputAt(1)->IsRegister() ||
         ToRegister(instr->InputAt(1)).is(a2));
  ASSERT(ToDoubleRegister(instr->InputAt(0)).is(f2));
  ASSERT(ToDoubleRegister(instr->result()).is(f0));

  if (exponent_type.IsTagged()) {
    Label no_deopt;
    __ JumpIfSmi(a2, &no_deopt);
    __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset));
    // Deoptimize if the exponent is neither a smi nor a heap number.
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    DeoptimizeIf(ne, instr->environment(), t3, Operand(at));
    __ bind(&no_deopt);
    MathPowStub stub(MathPowStub::TAGGED);
    __ CallStub(&stub);
  } else if (exponent_type.IsInteger32()) {
    MathPowStub stub(MathPowStub::INTEGER);
    __ CallStub(&stub);
  } else {
    ASSERT(exponent_type.IsDouble());
    MathPowStub stub(MathPowStub::DOUBLE);
    __ CallStub(&stub);
  }
}


void LCodeGen::DoRandom(LRandom* instr) {
  class DeferredDoRandom: public LDeferredCode {
   public:
    DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LRandom* instr_;
  };

  DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
  // Having marked this instruction as a call we can use any
  // registers.
  ASSERT(ToDoubleRegister(instr->result()).is(f0));
  ASSERT(ToRegister(instr->InputAt(0)).is(a0));

  static const int kSeedSize = sizeof(uint32_t);
  STATIC_ASSERT(kPointerSize == kSeedSize);

  __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
  static const int kRandomSeedOffset =
      FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
  __ lw(a2, FieldMemOperand(a0, kRandomSeedOffset));
  // a2: FixedArray of the global context's random seeds.

  // Load state[0].
  __ lw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));
  __ Branch(deferred->entry(), eq, a1, Operand(zero_reg));
  // Load state[1].
  __ lw(a0, FieldMemOperand(a2, ByteArray::kHeaderSize + kSeedSize));
  // a1: state[0].
  // a0: state[1].

  // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
  __ And(a3, a1, Operand(0xFFFF));
  __ li(t0, Operand(18273));
  __ mul(a3, a3, t0);
  __ srl(a1, a1, 16);
  __ Addu(a1, a3, a1);
  // Save state[0].
  __ sw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));

  // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
  __ And(a3, a0, Operand(0xFFFF));
  __ li(t0, Operand(36969));
  __ mul(a3, a3, t0);
  __ srl(a0, a0, 16);
  __ Addu(a0, a3, a0);
  // Save state[1].
  __ sw(a0, FieldMemOperand(a2, ByteArray::kHeaderSize + kSeedSize));

  // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
  __ And(a0, a0, Operand(0x3FFFF));
  __ sll(a1, a1, 14);
  __ Addu(v0, a0, a1);

  __ bind(deferred->exit());

  // 0x41300000 is the top half of 1.0 x 2^20 as a double.
  __ li(a2, Operand(0x41300000));
  // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
  __ Move(f12, v0, a2);
  // Move 0x4130000000000000 to FPU.
  __ Move(f14, zero_reg, a2);
  // Subtract to get the result.
  __ sub_d(f0, f12, f14);
}
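
// The fast path above is a multiply-with-carry style pseudo-random generator,
// inlined so that most Math.random() calls avoid the C++ runtime. A rough C
// sketch of what the emitted code computes (illustration only, not part of
// the generated code):
//
//   uint32_t random_bits(uint32_t* state) {
//     state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16);
//     state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16);
//     return (state[0] << 14) + (state[1] & 0x3FFFF);
//   }
//
// The FPU sequence at the end of DoRandom maps the 32 random bits r into
// [0, 1): with high word 0x41300000 the double equals 2^20 + r * 2^-32, so
// subtracting 1.0 * 2^20 leaves exactly r * 2^-32.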


void LCodeGen::DoDeferredRandom(LRandom* instr) {
  __ PrepareCallCFunction(1, scratch0());
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
  // Return value is in v0.
}


void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(f4));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(f4));
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(f4));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(f4));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathTan:
      DoMathTan(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;
    default:
      Abort("Unimplemented type of LUnaryMathOperation.");
      UNREACHABLE();
  }
}


void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(a1));
  ASSERT(instr->HasPointerMap());
  ASSERT(instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
  ParameterCount count(instr->arity());
  __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ li(a2, Operand(instr->name()));
  CallCode(ic, mode, instr);
  // Restore context register.
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(a1));
  ASSERT(ToRegister(instr->result()).is(v0));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ li(a2, Operand(instr->name()));
  CallCode(ic, mode, instr);
  __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}


void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(a1));
  ASSERT(ToRegister(instr->result()).is(v0));

  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
  __ li(a0, Operand(instr->arity()));
  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}


void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}


void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  if (!instr->transition().is_null()) {
    __ li(scratch, Operand(instr->transition()));
    __ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  HType type = instr->hydrogen()->value()->type();
  SmiCheck check_needed =
      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
  if (instr->is_in_object()) {
    __ sw(value, FieldMemOperand(object, offset));
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWriteField(object,
                          offset,
                          value,
                          scratch,
                          kRAHasBeenSaved,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  } else {
    __ lw(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ sw(value, FieldMemOperand(scratch, offset));
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWriteField(scratch,
                          offset,
                          value,
                          object,
                          kRAHasBeenSaved,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  }
}


void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(a1));
  ASSERT(ToRegister(instr->value()).is(a0));

  // Name is always in a2.
  __ li(a2, Operand(instr->name()));
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  DeoptimizeIf(hs,
               instr->environment(),
               ToRegister(instr->index()),
               Operand(ToRegister(instr->length())));
}
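
// The single unsigned comparison above ("hs" is unsigned >=) also rejects
// negative indices: reinterpreted as uint32, any negative int32 is larger
// than every valid length. A rough sketch (illustration only):
//
//   // deoptimize unless 0 <= index < length
//   if ((uint32_t)index >= (uint32_t)length) deoptimize();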


void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ sw(value, FieldMemOperand(elements, offset));
  } else {
    __ sll(scratch, key, kPointerSizeLog2);
    __ addu(scratch, elements, scratch);
    __ sw(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    // Compute address of modified element and store it into key register.
    __ Addu(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ RecordWrite(elements,
                   key,
                   value,
                   kRAHasBeenSaved,
                   kSaveFPRegs,
                   EMIT_REMEMBERED_SET,
                   check_needed);
  }
}


void LCodeGen::DoStoreKeyedFastDoubleElement(
    LStoreKeyedFastDoubleElement* instr) {
  DoubleRegister value = ToDoubleRegister(instr->value());
  Register elements = ToRegister(instr->elements());
  Register key = no_reg;
  Register scratch = scratch0();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  Label not_nan;

  // Calculate the effective address of the slot in the array to store the
  // double value.
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
  if (key_is_constant) {
    __ Addu(scratch, elements, Operand(constant_key * (1 << shift_size) +
            FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  } else {
    __ sll(scratch, key, shift_size);
    __ Addu(scratch, elements, Operand(scratch));
    __ Addu(scratch, scratch,
            Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  }

  Label is_nan;
  // Check for NaN. All NaNs must be canonicalized.
  __ BranchF(NULL, &is_nan, eq, value, value);
  __ Branch(&not_nan);

  // Only load the canonical NaN if the comparison above found the value
  // to be NaN.
  __ bind(&is_nan);
  __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());

  __ bind(&not_nan);
  __ sdc1(value, MemOperand(scratch));
}
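
// Why canonicalize: a FAST_DOUBLE_ELEMENTS backing store reserves one special
// NaN bit pattern ("the hole") to mark missing elements, so any NaN being
// stored is first replaced by the canonical NaN
// (canonical_not_the_hole_nan_as_double) to keep arbitrary NaN payloads from
// aliasing the hole. The self-comparison BranchF(..., eq, value, value) is
// unordered exactly when the value is NaN, which routes NaNs to is_nan.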


void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {

  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = no_reg;
  ElementsKind elements_kind = instr->elements_kind();
  bool key_is_constant = instr->key()->IsConstantOperand();
  int constant_key = 0;
  if (key_is_constant) {
    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
    if (constant_key & 0xF0000000) {
      Abort("array index constant value too big.");
    }
  } else {
    key = ToRegister(instr->key());
  }
  int shift_size = ElementsKindToShiftSize(elements_kind);

  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
      elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    FPURegister value(ToDoubleRegister(instr->value()));
    if (key_is_constant) {
      __ Addu(scratch0(), external_pointer, constant_key * (1 << shift_size));
    } else {
      __ sll(scratch0(), key, shift_size);
      __ Addu(scratch0(), scratch0(), external_pointer);
    }

    if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
      __ cvt_s_d(double_scratch0(), value);
      __ swc1(double_scratch0(), MemOperand(scratch0()));
    } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
      __ sdc1(value, MemOperand(scratch0()));
    }
  } else {
    Register value(ToRegister(instr->value()));
    MemOperand mem_operand(zero_reg);
    Register scratch = scratch0();
    if (key_is_constant) {
      mem_operand = MemOperand(external_pointer,
                               constant_key * (1 << shift_size));
    } else {
      __ sll(scratch, key, shift_size);
      __ Addu(scratch, scratch, external_pointer);
      mem_operand = MemOperand(scratch);
    }
    switch (elements_kind) {
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        __ sb(value, mem_operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ sh(value, mem_operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ sw(value, mem_operand);
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_SMI_ONLY_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
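
// External (typed) array stores address elements as
// external_pointer + (key << shift_size), where ElementsKindToShiftSize
// returns log2 of the element width. A rough sketch (illustration only):
//
//   // e.g. shift_size: 0 for byte, 1 for short, 2 for int/float, 3 for double
//   addr = external_pointer + (key << shift_size);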


void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(a2));
  ASSERT(ToRegister(instr->key()).is(a1));
  ASSERT(ToRegister(instr->value()).is(a0));

  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
  Register object_reg = ToRegister(instr->object());
  Register new_map_reg = ToRegister(instr->new_map_reg());
  Register scratch = scratch0();

  Handle<Map> from_map = instr->original_map();
  Handle<Map> to_map = instr->transitioned_map();
  ElementsKind from_kind = from_map->elements_kind();
  ElementsKind to_kind = to_map->elements_kind();

  __ mov(ToRegister(instr->result()), object_reg);

  Label not_applicable;
  __ lw(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset));
  __ Branch(&not_applicable, ne, scratch, Operand(from_map));

  __ li(new_map_reg, Operand(to_map));
  if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
    __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
    // Write barrier.
    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
                        scratch, kRAHasBeenSaved, kDontSaveFPRegs);
  } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
             to_kind == FAST_DOUBLE_ELEMENTS) {
    Register fixed_object_reg = ToRegister(instr->temp_reg());
    ASSERT(fixed_object_reg.is(a2));
    ASSERT(new_map_reg.is(a3));
    __ mov(fixed_object_reg, object_reg);
    CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
             RelocInfo::CODE_TARGET, instr);
  } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
    Register fixed_object_reg = ToRegister(instr->temp_reg());
    ASSERT(fixed_object_reg.is(a2));
    ASSERT(new_map_reg.is(a3));
    __ mov(fixed_object_reg, object_reg);
    CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
             RelocInfo::CODE_TARGET, instr);
  } else {
    UNREACHABLE();
  }
  __ bind(&not_applicable);
}


void LCodeGen::DoStringAdd(LStringAdd* instr) {
  __ push(ToRegister(instr->left()));
  __ push(ToRegister(instr->right()));
  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStringCharCodeAt* instr_;
  };

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);
  StringCharLoadGenerator::Generate(masm(),
                                    ToRegister(instr->string()),
                                    ToRegister(instr->index()),
                                    ToRegister(instr->result()),
                                    deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, zero_reg);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ Addu(scratch, zero_reg, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(v0);
  }
  __ SmiUntag(v0);
  __ StoreToSafepointRegisterSlot(v0, result);
}


void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  ASSERT(!char_code.is(result));

  __ Branch(deferred->entry(), hi,
            char_code, Operand(String::kMaxAsciiCharCode));
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ sll(scratch, char_code, kPointerSizeLog2);
  __ Addu(result, result, scratch);
  __ lw(result, FieldMemOperand(result, FixedArray::kHeaderSize));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ Branch(deferred->entry(), eq, result, Operand(scratch));
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, zero_reg);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(v0, result);
}


void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ lw(result, FieldMemOperand(string, String::kLengthOffset));
}


void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  ASSERT(output->IsDoubleRegister());
  FPURegister single_scratch = double_scratch0().low();
  if (input->IsStackSlot()) {
    Register scratch = scratch0();
    __ lw(scratch, ToMemOperand(input));
    __ mtc1(scratch, single_scratch);
  } else {
    __ mtc1(ToRegister(input), single_scratch);
  }
  __ cvt_d_w(ToDoubleRegister(output), single_scratch);
}


void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagI* instr_;
  };

  Register src = ToRegister(instr->InputAt(0));
  Register dst = ToRegister(instr->result());
  Register overflow = scratch0();

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  __ SmiTagCheckOverflow(dst, src, overflow);
  __ BranchOnOverflow(deferred->entry(), overflow);
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register src = ToRegister(instr->InputAt(0));
  Register dst = ToRegister(instr->result());
  FPURegister dbl_scratch = double_scratch0();

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  if (dst.is(src)) {
    __ SmiUntag(src, dst);
    __ Xor(src, src, Operand(0x80000000));
  }
  __ mtc1(src, dbl_scratch);
  __ cvt_d_w(dbl_scratch, dbl_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(t1, a3, t0, t2, &slow);
    __ Move(dst, t1);
    __ Branch(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the
  // result register is stored, as this register is in the pointer map, but
  // contains an integer value.
  __ StoreToSafepointRegisterSlot(zero_reg, dst);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ Move(dst, v0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sdc1(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset));
  __ StoreToSafepointRegisterSlot(dst, dst);
}
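
// How the original value is recovered above when dst aliases src: on
// overflow, SmiTagCheckOverflow left dst == src << 1, which preserves bits
// 30..0 of src and makes bit 31 a copy of bit 30. The arithmetic shift in
// SmiUntag brings those bits back, and because overflow means bits 30 and 31
// of src disagreed, XOR-ing 0x80000000 flips bit 31 back to its original
// value.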


void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ Branch(deferred->entry());
  }
  __ bind(deferred->exit());
  __ sdc1(input_reg, FieldMemOperand(reg, HeapNumber::kValueOffset));
}


void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, zero_reg);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ StoreToSafepointRegisterSlot(v0, reg);
}


void LCodeGen::DoSmiTag(LSmiTag* instr) {
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ SmiTag(ToRegister(instr->result()), ToRegister(instr->InputAt(0)));
}


void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->needs_check()) {
    STATIC_ASSERT(kHeapObjectTag == 1);
    // If the input is a HeapObject, value of scratch won't be zero.
    __ And(scratch, input, Operand(kHeapObjectTag));
    __ SmiUntag(result, input);
    DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
  } else {
    __ SmiUntag(result, input);
  }
}
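
// Smi (small integer) tagging on 32-bit targets: a smi is the 31-bit payload
// shifted left by one with a 0 tag bit, while heap object pointers carry
// kHeapObjectTag == 1 in the low bit. A rough sketch (illustration only):
//
//   tagged = value << 1;             // SmiTag
//   smi    = (tagged & 1) == 0;      // the check above
//   value  = (int32_t)tagged >> 1;   // SmiUntag, arithmetic shift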


void LCodeGen::EmitNumberUntagD(Register input_reg,
                                DoubleRegister result_reg,
                                bool deoptimize_on_undefined,
                                bool deoptimize_on_minus_zero,
                                LEnvironment* env) {
  Register scratch = scratch0();

  Label load_smi, done;

  // Smi check.
  __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);

  // Heap number map check.
  __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  if (deoptimize_on_undefined) {
    DeoptimizeIf(ne, env, scratch, Operand(at));
  } else {
    Label heap_number;
    __ Branch(&heap_number, eq, scratch, Operand(at));

    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    DeoptimizeIf(ne, env, input_reg, Operand(at));

    // Convert undefined to NaN.
    __ LoadRoot(at, Heap::kNanValueRootIndex);
    __ ldc1(result_reg, FieldMemOperand(at, HeapNumber::kValueOffset));
    __ Branch(&done);

    __ bind(&heap_number);
  }
  // Heap number to double register conversion.
  __ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
  if (deoptimize_on_minus_zero) {
    __ mfc1(at, result_reg.low());
    __ Branch(&done, ne, at, Operand(zero_reg));
    __ mfc1(scratch, result_reg.high());
    DeoptimizeIf(eq, env, scratch, Operand(HeapNumber::kSignMask));
  }
  __ Branch(&done);

  // Smi to double register conversion.
  __ bind(&load_smi);
  // scratch: untagged value of input_reg
  __ mtc1(scratch, result_reg);
  __ cvt_d_w(result_reg, result_reg);
  __ bind(&done);
}
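
// The -0 test above relies on the HeapNumber layout: the payload at
// kValueOffset is a raw IEEE 754 double, and -0.0 is the unique value whose
// low word is zero while the high word equals HeapNumber::kSignMask, so two
// integer compares suffice after moving the halves out of the FPU register.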


void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DoubleRegister double_scratch = double_scratch0();
  FPURegister single_scratch = double_scratch.low();

  ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
  ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));

  Label done;

  // The input is a tagged HeapObject.
  // Heap number map check.
  __ lw(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  // This 'at' value and scratch1 map value are used for tests in both clauses
  // of the if.

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    DoubleRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
    ASSERT(!scratch3.is(input_reg) &&
           !scratch3.is(scratch1) &&
           !scratch3.is(scratch2));
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    Label heap_number;
    __ Branch(&heap_number, eq, scratch1, Operand(at));  // HeapNumber map?
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    DeoptimizeIf(ne, instr->environment(), input_reg, Operand(at));
    ASSERT(ToRegister(instr->result()).is(input_reg));
    __ mov(input_reg, zero_reg);
    __ Branch(&done);

    __ bind(&heap_number);
    __ ldc1(double_scratch2,
            FieldMemOperand(input_reg, HeapNumber::kValueOffset));
    __ EmitECMATruncate(input_reg,
                        double_scratch2,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);
  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment(), scratch1, Operand(at));

    // Load the double value.
    __ ldc1(double_scratch,
            FieldMemOperand(input_reg, HeapNumber::kValueOffset));

    Register except_flag = scratch2;
    __ EmitFPUTruncate(kRoundToZero,
                       single_scratch,
                       double_scratch,
                       scratch1,
                       except_flag,
                       kCheckForInexactConversion);

    // Deopt if the operation did not succeed.
    DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));

    // Load the result.
    __ mfc1(input_reg, single_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ Branch(&done, ne, input_reg, Operand(zero_reg));

      __ mfc1(scratch1, double_scratch.high());
      __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
      DeoptimizeIf(ne, instr->environment(), scratch1, Operand(zero_reg));
    }
  }
  __ bind(&done);
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  class DeferredTaggedToI: public LDeferredCode {
   public:
    DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LTaggedToI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Let the deferred code handle the HeapObject case.
  __ JumpIfNotSmi(input_reg, deferred->entry());

  // Smi to int32 conversion.
  __ SmiUntag(input_reg);
  __ bind(deferred->exit());
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  DoubleRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   instr->hydrogen()->deoptimize_on_minus_zero(),
                   instr->environment());
}


void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  Register result_reg = ToRegister(instr->result());
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DoubleRegister double_input = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister double_scratch = double_scratch0();
  FPURegister single_scratch = double_scratch0().low();

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    __ EmitECMATruncate(result_reg,
                        double_input,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);
  } else {
    Register except_flag = scratch2;

    __ EmitFPUTruncate(kRoundToMinusInf,
                       single_scratch,
                       double_input,
                       scratch1,
                       except_flag,
                       kCheckForInexactConversion);

    // Deopt if the operation did not succeed (except_flag != 0).
    DeoptimizeIf(ne, instr->environment(), except_flag, Operand(zero_reg));

    // Load the result.
    __ mfc1(result_reg, single_scratch);
  }
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ And(at, ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ And(at, ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ GetObjectType(input, scratch, scratch);

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    // If there is only one type in the interval check for equality.
    if (first == last) {
      DeoptimizeIf(ne, instr->environment(), scratch, Operand(first));
    } else {
      DeoptimizeIf(lo, instr->environment(), scratch, Operand(first));
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        DeoptimizeIf(hi, instr->environment(), scratch, Operand(last));
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ And(at, scratch, mask);
      DeoptimizeIf(tag == 0 ? ne : eq, instr->environment(),
                   at, Operand(zero_reg));
    } else {
      __ And(scratch, scratch, Operand(mask));
      DeoptimizeIf(ne, instr->environment(), scratch, Operand(tag));
    }
  }
}
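
// For non-interval class checks the instance type is tested against a
// (mask, tag) pair: deopt unless (instance_type & mask) == tag. When the mask
// is a single bit and the tag is either zero or that same bit, one And plus a
// compare against zero is enough, as in this rough sketch (illustration
// only):
//
//   if ((instance_type & mask) != tag) deoptimize();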


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  Register reg = ToRegister(instr->value());
  Handle<JSFunction> target = instr->hydrogen()->target();
  if (isolate()->heap()->InNewSpace(*target)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(target);
    __ li(at, Operand(Handle<Object>(cell)));
    __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
    DeoptimizeIf(ne, instr->environment(), reg,
                 Operand(at));
  } else {
    DeoptimizeIf(ne, instr->environment(), reg,
                 Operand(target));
  }
}


void LCodeGen::DoCheckMapCommon(Register reg,
                                Register scratch,
                                Handle<Map> map,
                                CompareMapMode mode,
                                LEnvironment* env) {
  Label success;
  __ CompareMapAndBranch(reg, scratch, map, &success, eq, &success, mode);
  DeoptimizeIf(al, env);
  __ bind(&success);
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  Handle<Map> map = instr->hydrogen()->map();
  DoCheckMapCommon(reg, scratch, map, instr->hydrogen()->mode(),
                   instr->environment());
}


void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  DoubleRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  __ ClampDoubleToUint8(result_reg, value_reg, temp_reg);
}


void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  Register unclamped_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampUint8(result_reg, unclamped_reg);
}


void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  Register scratch = scratch0();
  Register input_reg = ToRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  DoubleRegister temp_reg = ToDoubleRegister(instr->TempAt(0));
  Label is_smi, done, heap_number;

  // Both smi and heap number cases are handled.
  __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi);

  // Check for heap number.
  __ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ Branch(&heap_number, eq, scratch, Operand(factory()->heap_number_map()));

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  DeoptimizeIf(ne, instr->environment(), input_reg,
               Operand(factory()->undefined_value()));
  __ mov(result_reg, zero_reg);
  __ jmp(&done);

  // Heap number.
  __ bind(&heap_number);
  __ ldc1(double_scratch0(), FieldMemOperand(input_reg,
                                             HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
  __ jmp(&done);

  __ bind(&is_smi);
  __ ClampUint8(result_reg, scratch);

  __ bind(&done);
}
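
// The clamp helpers implement the external pixel array store semantics:
// NaN and negative values become 0, values above 255 become 255, and
// in-range doubles are rounded to the nearest integer. A rough sketch,
// assuming round-to-nearest (illustration only):
//
//   uint8_t clamp_to_uint8(double x) {
//     if (!(x > 0)) return 0;      // catches NaN and negatives
//     if (x >= 255) return 255;
//     return (uint8_t)(x + 0.5);   // nearest; the helper uses the FPU
//   }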


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  __ LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    DoCheckMapCommon(temp1, temp2,
                     Handle<Map>(current_prototype->map()),
                     ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    __ LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  DoCheckMapCommon(temp1, temp2,
                   Handle<Map>(current_prototype->map()),
                   ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
}


void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
  class DeferredAllocateObject: public LDeferredCode {
   public:
    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LAllocateObject* instr_;
  };

  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);

  Register result = ToRegister(instr->result());
  Register scratch = ToRegister(instr->TempAt(0));
  Register scratch2 = ToRegister(instr->TempAt(1));
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
  Handle<Map> initial_map(constructor->initial_map());
  int instance_size = initial_map->instance_size();
  ASSERT(initial_map->pre_allocated_property_fields() +
         initial_map->unused_property_fields() -
         initial_map->inobject_properties() == 0);

  // Allocate memory for the object. The initial map might change when
  // the constructor's prototype changes, but instance size and property
  // counts remain unchanged (if slack tracking finished).
  ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
  __ AllocateInNewSpace(instance_size,
                        result,
                        scratch,
                        scratch2,
                        deferred->entry(),
                        TAG_OBJECT);

  // Load the initial map.
  Register map = scratch;
  __ LoadHeapObject(map, constructor);
  __ lw(map, FieldMemOperand(map, JSFunction::kPrototypeOrInitialMapOffset));

  // Initialize map and fields of the newly allocated object.
  ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
  __ sw(map, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  __ sw(scratch, FieldMemOperand(result, JSObject::kElementsOffset));
  __ sw(scratch, FieldMemOperand(result, JSObject::kPropertiesOffset));
  if (initial_map->inobject_properties() != 0) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
    for (int i = 0; i < initial_map->inobject_properties(); i++) {
      int property_offset = JSObject::kHeaderSize + i * kPointerSize;
      __ sw(scratch, FieldMemOperand(result, property_offset));
    }
  }

  __ bind(deferred->exit());
}
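
// AllocateInNewSpace is the inline bump-pointer fast path: it reserves
// instance_size bytes by advancing the new-space allocation top, branching to
// deferred->entry() (which falls back to Runtime::kNewObject) when new space
// is exhausted; TAG_OBJECT returns the result with the heap-object tag bit
// already set, matching the FieldMemOperand stores in DoAllocateObject.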
4343
4344
4345void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
4346 Register result = ToRegister(instr->result());
4347 Handle<JSFunction> constructor = instr->hydrogen()->constructor();
4348
4349 // TODO(3095996): Get rid of this. For now, we need to make the
4350 // result register contain a valid pointer because it is already
4351 // contained in the register pointer map.
4352 __ mov(result, zero_reg);
4353
4354 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4355 __ LoadHeapObject(a0, constructor);
4356 __ push(a0);
4357 CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
4358 __ StoreToSafepointRegisterSlot(v0, result);
4359}
4360
4361
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00004362void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00004363 Heap* heap = isolate()->heap();
4364 ElementsKind boilerplate_elements_kind =
4365 instr->hydrogen()->boilerplate_elements_kind();
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00004366
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00004367 // Deopt if the array literal boilerplate ElementsKind is of a type different
4368 // than the expected one. The check isn't necessary if the boilerplate has
4369 // already been converted to FAST_ELEMENTS.
4370 if (boilerplate_elements_kind != FAST_ELEMENTS) {
4371 __ LoadHeapObject(a1, instr->hydrogen()->boilerplate_object());
4372 // Load map into a2.
4373 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
4374 // Load the map's "bit field 2".
4375 __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
4376 // Retrieve elements_kind from bit field 2.
4377 __ Ext(a2, a2, Map::kElementsKindShift, Map::kElementsKindBitCount);
4378 DeoptimizeIf(ne,
4379 instr->environment(),
4380 a2,
4381 Operand(boilerplate_elements_kind));
4382 }
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00004383 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4384 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
4385 __ li(a2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00004386 // Boilerplate already exists, constant elements are never accessed.
4387 // Pass an empty fixed array.
4388 __ li(a1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00004389 __ Push(a3, a2, a1);
4390
4391 // Pick the right runtime function or stub to call.
4392 int length = instr->hydrogen()->length();
4393 if (instr->hydrogen()->IsCopyOnWrite()) {
4394 ASSERT(instr->hydrogen()->depth() == 1);
4395 FastCloneShallowArrayStub::Mode mode =
4396 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
4397 FastCloneShallowArrayStub stub(mode, length);
4398 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4399 } else if (instr->hydrogen()->depth() > 1) {
4400 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
4401 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
4402 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
4403 } else {
4404 FastCloneShallowArrayStub::Mode mode =
ricow@chromium.org64e3a4b2011-12-13 08:07:27 +00004405 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
4406 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
4407 : FastCloneShallowArrayStub::CLONE_ELEMENTS;
jkummerow@chromium.orgc3b37122011-11-07 10:14:12 +00004408 FastCloneShallowArrayStub stub(mode, length);
4409 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4410 }
4411}


void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                            Register result,
                            Register source,
                            int* offset) {
  ASSERT(!source.is(a2));
  ASSERT(!result.is(a2));

  // Only elements backing stores for non-COW arrays need to be copied.
  Handle<FixedArrayBase> elements(object->elements());
  bool has_elements = elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map();

  // Increase the offset so that subsequent objects end up right after
  // this object and its backing store.
  int object_offset = *offset;
  int object_size = object->map()->instance_size();
  int elements_offset = *offset + object_size;
  int elements_size = has_elements ? elements->Size() : 0;
  *offset += object_size + elements_size;

  // Copy object header.
  ASSERT(object->properties()->length() == 0);
  int inobject_properties = object->map()->inobject_properties();
  int header_size = object_size - inobject_properties * kPointerSize;
  for (int i = 0; i < header_size; i += kPointerSize) {
    if (has_elements && i == JSObject::kElementsOffset) {
      __ Addu(a2, result, Operand(elements_offset));
    } else {
      __ lw(a2, FieldMemOperand(source, i));
    }
    __ sw(a2, FieldMemOperand(result, object_offset + i));
  }

  // Copy in-object properties.
  for (int i = 0; i < inobject_properties; i++) {
    int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
    Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
    if (value->IsJSObject()) {
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      __ Addu(a2, result, Operand(*offset));
      __ sw(a2, FieldMemOperand(result, total_offset));
      __ LoadHeapObject(source, value_object);
      EmitDeepCopy(value_object, result, source, offset);
    } else if (value->IsHeapObject()) {
      __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
      __ sw(a2, FieldMemOperand(result, total_offset));
    } else {
      __ li(a2, Operand(value));
      __ sw(a2, FieldMemOperand(result, total_offset));
    }
  }

  // Copy elements backing store header.
  ASSERT(!has_elements || elements->IsFixedArray());
  if (has_elements) {
    __ LoadHeapObject(source, elements);
    for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
      __ lw(a2, FieldMemOperand(source, i));
      __ sw(a2, FieldMemOperand(result, elements_offset + i));
    }
  }

  // Copy elements backing store content.
  ASSERT(!has_elements || elements->IsFixedArray());
  int elements_length = has_elements ? elements->length() : 0;
  for (int i = 0; i < elements_length; i++) {
    int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
    Handle<Object> value = JSObject::GetElement(object, i);
    if (value->IsJSObject()) {
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      __ Addu(a2, result, Operand(*offset));
      __ sw(a2, FieldMemOperand(result, total_offset));
      __ LoadHeapObject(source, value_object);
      EmitDeepCopy(value_object, result, source, offset);
    } else if (value->IsHeapObject()) {
      __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
      __ sw(a2, FieldMemOperand(result, total_offset));
    } else {
      __ li(a2, Operand(value));
      __ sw(a2, FieldMemOperand(result, total_offset));
    }
  }
}
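
// Illustrative sketch (not part of the original source) of the layout
// EmitDeepCopy produces inside the single allocation: each object is
// followed by its non-COW elements backing store, and nested objects are
// appended after their parent, with *offset tracking the running total.
// Roughly, for a boilerplate like {a: [1]}:
//
//   [outer object header + in-object properties]
//   [outer object's elements, if any]
//   [nested array object][nested array's elements]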


void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
  int size = instr->hydrogen()->total_size();

  // Allocate all objects that are part of the literal in one big
  // allocation. This avoids multiple limit checks.
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ push(a0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);

  __ bind(&allocated);
  int offset = 0;
  __ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
  EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset);
  ASSERT_EQ(size, offset);
}
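
// Note (illustrative, not in the original source): total_size() is the
// precomputed size of the boilerplate plus every nested object and backing
// store, so the single AllocateInNewSpace above covers the whole literal,
// and the ASSERT_EQ checks that EmitDeepCopy consumed exactly that many
// bytes.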


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));
  Handle<FixedArray> literals(instr->environment()->closure()->literals());
  Handle<FixedArray> constant_properties =
      instr->hydrogen()->constant_properties();

  // Set up the parameters to the stub/runtime call.
  __ LoadHeapObject(t0, literals);
  __ li(a3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ li(a2, Operand(constant_properties));
  int flags = instr->hydrogen()->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  __ li(a1, Operand(Smi::FromInt(flags)));
  __ Push(t0, a3, a2, a1);

  // Pick the right runtime function or stub to call.
  int properties_count = constant_properties->length() / 2;
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
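
// Illustrative mapping (not part of the original source) from JavaScript
// object literals to the dispatch above:
//
//   var o = {a: 1, b: 2};    // shallow, fast elements, few properties:
//                            // FastCloneShallowObjectStub.
//   var p = {a: {b: 1}};     // depth > 1: Runtime::kCreateObjectLiteral.
//
// properties_count is constant_properties->length() / 2 because the fixed
// array stores alternating key/value pairs.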


void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(a0));
  ASSERT(ToRegister(instr->result()).is(v0));
  __ push(a0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // a3 = JS function.
  // t3 = literals array.
  // a1 = regexp literal.
  // a0 = regexp literal clone.
  // a2 and t0-t2 are used as temporaries.
  __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ lw(a1, FieldMemOperand(t3, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, a1, Operand(at));

  // Create the regexp literal using a runtime function.
  // Result will be in v0.
  __ li(t2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ li(t1, Operand(instr->hydrogen()->pattern()));
  __ li(t0, Operand(instr->hydrogen()->flags()));
  __ Push(t3, t2, t1, t0);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(a1, v0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ li(a0, Operand(Smi::FromInt(size)));
  __ Push(a1, a0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(a1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll the copy loop once for better throughput.)
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ lw(a3, FieldMemOperand(a1, i));
    __ lw(a2, FieldMemOperand(a1, i + kPointerSize));
    __ sw(a3, FieldMemOperand(v0, i));
    __ sw(a2, FieldMemOperand(v0, i + kPointerSize));
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ lw(a3, FieldMemOperand(a1, size - kPointerSize));
    __ sw(a3, FieldMemOperand(v0, size - kPointerSize));
  }
}
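
// Note (illustrative, not in the original source): a literal such as
// /ab+c/gi is materialized via the runtime only while its literals-array
// slot is still undefined; later executions find the materialized JSRegExp
// there and only clone it. The clone is a raw word-by-word copy of
// JSRegExp::kSize plus the in-object fields, with the loop copying two
// words per iteration and a single trailing word handled after the loop
// when the size is an odd number of pointers.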


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(shared_info->language_mode());
    __ li(a1, Operand(shared_info));
    __ push(a1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ li(a2, Operand(shared_info));
    __ li(a1, Operand(pretenure
                      ? factory()->true_value()
                      : factory()->false_value()));
    __ Push(cp, a2, a1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
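
// Note (illustrative, not in the original source): a nested function that
// needs no literal cloning and no pretenuring, e.g.
//
//   function outer() { return function inner(x) { return x + 1; }; }
//
// takes the FastNewClosureStub path; pretenured closures or closures with
// literals go through Runtime::kNewClosure, with the pretenure flag pushed
// as a boolean value.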


void LCodeGen::DoTypeof(LTypeof* instr) {
  ASSERT(ToRegister(instr->result()).is(v0));
  Register input = ToRegister(instr->InputAt(0));
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Register cmp1 = no_reg;
  Operand cmp2 = Operand(no_reg);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal(),
                                                  cmp1,
                                                  cmp2);

  ASSERT(cmp1.is_valid());
  ASSERT(!cmp2.is_reg() || cmp2.rm().is_valid());

  if (final_branch_condition != kNoCondition) {
    EmitBranch(true_block, false_block, final_branch_condition, cmp1, cmp2);
  }
}


Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name,
                                 Register& cmp1,
                                 Operand& cmp2) {
  // This function makes heavy use of branch delay slots: they are used to
  // load values that are safe to compute regardless of the type of the
  // input register.
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    __ JumpIfSmi(input, true_label);
    __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    cmp1 = input;
    cmp2 = Operand(at);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ GetObjectType(input, input, scratch);
    __ Branch(USE_DELAY_SLOT, false_label,
              ge, scratch, Operand(FIRST_NONSTRING_TYPE));
    // input is an object, so we can load the map's bit field even if we
    // take the other branch.
    __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
    __ And(at, at, 1 << Map::kIsUndetectable);
    cmp1 = at;
    cmp2 = Operand(zero_reg);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    cmp1 = at;
    cmp2 = Operand(input);
    final_branch_condition = eq;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    cmp1 = at;
    cmp2 = Operand(input);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
    // The first instruction of JumpIfSmi is an And - it is safe in the delay
    // slot.
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
    __ And(at, at, 1 << Map::kIsUndetectable);
    cmp1 = at;
    cmp2 = Operand(zero_reg);
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfSmi(input, false_label);
    __ GetObjectType(input, scratch, input);
    __ Branch(true_label, eq, input, Operand(JS_FUNCTION_TYPE));
    cmp1 = input;
    cmp2 = Operand(JS_FUNCTION_PROXY_TYPE);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
      __ LoadRoot(at, Heap::kNullValueRootIndex);
      __ Branch(USE_DELAY_SLOT, true_label, eq, at, Operand(input));
    }
    // input is an object, so it is safe to use GetObjectType in the delay
    // slot.
    __ GetObjectType(input, input, scratch);
    __ Branch(USE_DELAY_SLOT, false_label,
              lt, scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Still an object, so the InstanceType can be loaded.
    __ lbu(scratch, FieldMemOperand(input, Map::kInstanceTypeOffset));
    __ Branch(USE_DELAY_SLOT, false_label,
              gt, scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Still an object, so the BitField can be loaded.
    // Check for undetectable objects => false.
    __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
    __ And(at, at, 1 << Map::kIsUndetectable);
    cmp1 = at;
    cmp2 = Operand(zero_reg);
    final_branch_condition = eq;

  } else {
    cmp1 = at;
    cmp2 = Operand(zero_reg);  // Set to valid regs, to avoid caller assertion.
    __ Branch(false_label);
  }

  return final_branch_condition;
}
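
// Illustrative examples (not part of the original source) of the branches
// above:
//
//   typeof 1         == "number"     // smi fast path, else heap number map.
//   typeof "s"       == "string"     // instance type < FIRST_NONSTRING_TYPE
//                                    // and not undetectable.
//   typeof true      == "boolean"    // compared against both roots.
//   typeof undefined == "undefined"  // also true for undetectable objects.
//   typeof f         == "function"   // JS_FUNCTION_TYPE or
//                                    // JS_FUNCTION_PROXY_TYPE.
//   typeof null      == "object"     // unless FLAG_harmony_typeof, which
//                                    // yields "null" instead.
//
// Any other literal, e.g. typeof x == "banana", unconditionally branches to
// false_label.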


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp1, scratch0());

  EmitBranch(true_block, false_block, eq, temp1,
             Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}


void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ lw(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ lw(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ Branch(&check_frame_marker, ne, temp2,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
}
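
// Note (illustrative, not in the original source): the helper above walks
// one frame up, skipping an arguments adaptor frame if present, and loads
// the caller's frame marker, so the branch in DoIsConstructCallAndBranch is
// taken for `new f()` (marker == StackFrame::CONSTRUCT) but not for a plain
// call `f()`.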


void LCodeGen::EnsureSpaceForLazyDeopt() {
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  int patch_size = Deoptimizer::patch_size();
  if (current_pc < last_lazy_deopt_pc_ + patch_size) {
    int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
    ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= Assembler::kInstrSize;
    }
  }
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
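
// Worked example (illustrative, not in the original source; the patch size
// value is hypothetical): if the previous lazy bailout ended at pc offset
// 100 and Deoptimizer::patch_size() is 16 bytes, a site at pc offset 108
// gets (100 + 16 - 108) = 8 bytes of padding, i.e. two 4-byte MIPS nops,
// so patching one deopt point can never overwrite its neighbour.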


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg));
}


void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ li(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}


void LCodeGen::DoIn(LIn* instr) {
  Register obj = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(key, obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
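
// Note (illustrative, not in the original source): the two handlers above
// are thin wrappers that push their operands and invoke a JS builtin, e.g.
//
//   delete o.p;   // Builtins::DELETE, with the strict mode flag as a Smi.
//   "p" in o;     // Builtins::IN.
//
// In both cases the SafepointGenerator records a lazy-deopt safepoint after
// the call returns.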


void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStackCheck* instr_;
  };

  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&done, hs, sp, Operand(at));
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    EnsureSpaceForLazyDeopt();
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
    EnsureSpaceForLazyDeopt();
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting the call and the safepoint
    // in the deferred code.
  }
}
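
// Note (illustrative, not in the original source): the two branches above
// correspond to the two places a stack check is emitted: once on function
// entry, where the StackCheckStub is called inline, and once per backwards
// branch, where the check jumps to deferred code so the hot loop body stays
// compact.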


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}


void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
  Register result = ToRegister(instr->result());
  Register object = ToRegister(instr->object());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  DeoptimizeIf(eq, instr->environment(), object, Operand(at));

  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  DeoptimizeIf(eq, instr->environment(), object, Operand(null_value));

  __ And(at, object, kSmiTagMask);
  DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));

  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(object, a1, a1);
  DeoptimizeIf(le, instr->environment(), a1, Operand(LAST_JS_PROXY_TYPE));

  Label use_cache, call_runtime;
  ASSERT(object.is(a0));
  __ CheckEnumCache(null_value, &call_runtime);

  __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(object);
  CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  ASSERT(result.is(v0));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  DeoptimizeIf(ne, instr->environment(), a1, Operand(at));
  __ bind(&use_cache);
}
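
// Illustrative note (not part of the original source): for a loop such as
//
//   for (var key in obj) { ... }
//
// the code above deoptimizes when obj is undefined, null, a Smi, or a JS
// proxy, then either reuses the enum cache (returning obj's map in result)
// or calls Runtime::kGetPropertyNamesFast and uses the meta-map check to
// verify the runtime returned a map rather than a plain fixed array of
// names.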


void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
  Register map = ToRegister(instr->map());
  Register result = ToRegister(instr->result());
  __ LoadInstanceDescriptors(map, result);
  __ lw(result,
        FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
  __ lw(result,
        FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
  DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
}


void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  Register map = ToRegister(instr->map());
  __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
  DeoptimizeIf(ne, instr->environment(), map, Operand(scratch0()));
}


void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  Label out_of_object, done;
  __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg));
  __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize);  // In delay slot.

  STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
  __ Addu(scratch, object, scratch);
  __ lw(result, FieldMemOperand(scratch, JSObject::kHeaderSize));

  __ Branch(&done);

  __ bind(&out_of_object);
  __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
  // The index is equal to the negated out-of-object property index plus 1.
  __ Subu(scratch, result, scratch);
  __ lw(result, FieldMemOperand(scratch,
                                FixedArray::kHeaderSize - kPointerSize));
  __ bind(&done);
}
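
// Worked example (illustrative, not in the original source): for a smi
// index i >= 0 the field lives in-object at JSObject::kHeaderSize +
// i * kPointerSize. A negative index selects the out-of-object properties
// array: with the encoding index == -(property_index + 1), the Subu above
// makes the final load address
// FixedArray::kHeaderSize - kPointerSize + (property_index + 1) *
// kPointerSize, i.e. exactly slot property_index of the array.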


#undef __

} }  // namespace v8::internal