// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "arm/lithium-codegen-arm.h"
#include "arm/lithium-gap-resolver-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

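// Wraps a call site so that it can later be patched for lazy deoptimization:
// BeforeCall pads with nops whenever the call would otherwise overlap the
// patch region of the previous safepoint, and AfterCall records the
// safepoint for this call.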
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the generated code there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end =
        codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
    if (call_end < prev_jump_end) {
      int padding_size = prev_jump_end - call_end;
      ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
      while (padding_size > 0) {
        codegen_->masm()->nop();
        padding_size -= Assembler::kInstrSize;
      }
    }
  }

  virtual void AfterCall() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both r0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Force constant pool emission at the end of deferred code to make
  // sure that no constant pools are emitted after the official end of
  // the instruction sequence.
  masm()->CheckConstPool(true, false);

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


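// Example, given the frame built in GeneratePrologue: spill slot 0 lives at
// fp - 3 * kPointerSize, below the context and function slots, while the
// incoming parameter with index -1 lives at fp + 2 * kPointerSize, just
// above the saved fp and lr.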
MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, context,
    // and the first word of the double in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
  } else {
    // Incoming parameter. Skip the return address and the first word of
    // the double.
    return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
  }
}


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
                                          SafepointMode safepoint_mode) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution can
  // continue from a previous bailout point, repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(),
                    deoptimization_environment->deoptimization_index());
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(),
        0,
        deoptimization_environment->deoptimization_index());
  }
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


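// Registers the instruction's environment for deoptimization and emits a
// jump to the eager deoptimization entry for it, taken when 'cc' holds
// (unconditionally when 'cc' is al).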
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == al) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
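    // For a negative dividend, compute -((-dividend) & (divisor - 1)) so
    // that the result has the sign of the dividend, as JS % requires.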
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    __ rsb(dividend, dividend, Operand(0));
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ rsb(dividend, dividend, Operand(0), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ b(ne, &done);
      DeoptimizeIf(al, instr->environment());
    } else {
      __ b(&done);
    }
    __ bind(&positive_dividend);
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(result.is(left));

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  __ b(lt, &vfp_modulo);

  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the (nonnegative) left hand side is smaller than the right hand
  // side, it is the result. Else try a few subtractions of the right
  // hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less than the right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value was preloaded above. Note that 'right' is only live
  // on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}


void LCodeGen::DoDivI(LDivI* instr) {
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

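  // For divisors 2 and 4 the conditional tst verifies that the division is
  // exact: the shifted result is only used (and the branch to 'done' only
  // taken) when the low remainder bits are zero.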
  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged as Smis. If that is not possible, deoptimize.
1030 DeferredDivI* deferred = new DeferredDivI(this, instr);
1031
1032 __ TrySmiTag(left, &deoptimize, scratch);
1033 __ TrySmiTag(right, &deoptimize, scratch);
1034
1035 __ b(al, deferred->entry());
1036 __ bind(deferred->exit());
1037
1038 // If the result in r0 is a Smi, untag it, else deoptimize.
Steve Block1e0659c2011-05-24 12:43:12 +01001039 __ JumpIfNotSmi(result, &deoptimize);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001040 __ SmiUntag(result);
1041 __ b(&done);
1042
1043 __ bind(&deoptimize);
1044 DeoptimizeIf(al, instr->environment());
1045 __ bind(&done);
1046}
1047
1048
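// Deferred slow path for arithmetic instructions: moves the operands into
// the r1 (left) / r0 (right) convention expected by TypeRecordingBinaryOpStub,
// calls the stub with registers and doubles saved, and writes the stub's
// result back into the safepoint slot for r0.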
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  TypeRecordingBinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->InputAt(0));
  Register right = EmitLoadRegister(instr->InputAt(1), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->InputAt(1)->IsConstantOperand()) {
    __ orr(ToRegister(instr->TempAt(0)), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(left, scratch, left, right);
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ cmp(left, Operand(0));
    __ b(ne, &done);
    if (instr->InputAt(1)->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
        DeoptimizeIf(al, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Operand right_operand(no_reg);

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    right_operand = Operand(right_reg);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    right_operand = ToOperand(right);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), right_operand);
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), right_operand);
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), right_operand);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(scratch, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          __ mov(result, Operand(result, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
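          // A zero shift count leaves the value unchanged, so a set sign
          // bit would make the uint32 result unrepresentable as an int32.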
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    __ sub(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ sub(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DwVfpRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ vmov(result, v);
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ b(ne, &done);
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ mvn(ToRegister(input), Operand(ToRegister(input)));
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    __ add(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ add(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      __ PrepareCallCFunction(4, scratch0());
      __ vmov(r0, r1, left);
      __ vmov(r2, r3, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 4);
      // Move the result in the double result register.
      __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


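// Branches on 'cc' between two blocks, omitting any jump to the block that
// is emitted immediately after this one so control can simply fall through.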
1391void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
1392 int next_block = GetNextEmittedBlock(current_block_);
1393 right_block = chunk_->LookupDestination(right_block);
1394 left_block = chunk_->LookupDestination(left_block);
1395
1396 if (right_block == left_block) {
1397 EmitGoto(left_block);
1398 } else if (left_block == next_block) {
1399 __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
1400 } else if (right_block == next_block) {
1401 __ b(cc, chunk_->GetAssemblyLabel(left_block));
1402 } else {
1403 __ b(cc, chunk_->GetAssemblyLabel(left_block));
1404 __ b(chunk_->GetAssemblyLabel(right_block));
1405 }
1406}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, eq);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, ne);
    }
  }
}
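
// The generic tagged path above mirrors JavaScript ToBoolean; roughly, in
// pseudo-JS (illustrative only):
//   undefined -> false;  true -> true;  false -> false;
//   smi 0 -> false;  any other smi -> true;
//   heap number d -> (d != 0 && !isNaN(d));
//   everything else (strings, objects) -> ToBooleanStub decides.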


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform a stack overflow check before jumping if this goto needs one.
    if (deferred_stack_check != NULL) {
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}


void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}
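
// The inline check in EmitGoto compares sp against the stack limit root and
// takes the goto directly while sp is still at or above the limit (hs is an
// unsigned >=). Otherwise control enters the deferred code, which calls
// Runtime::kStackGuard under a full safepoint and then resumes at the goto's
// target. Presumably only gotos flagged with include_stack_check() (loop
// back edges are the typical case) pay for this check.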


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = kNoCondition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}
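
// Condition mapping used above, for reference:
//   token  signed  unsigned
//   <      lt      lo
//   >      gt      hi
//   <=     le      ls
//   >=     ge      hs
// The unsigned forms double as the conditions for VFP double compares once
// the FP flags have been transferred to the status register (see DoCmpID
// below), since the ordered FP comparison results map onto the unsigned
// predicates.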


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  Register scratch = scratch0();

  Label unordered, done;
  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to unordered to return false.
    __ b(vs, &unordered);
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ b(cc, &done);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to false block label.
    __ b(vs, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}
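
// NaN handling above: after VFPCompareAndSetFlags, V set in the status
// register means "unordered", i.e. at least one operand was NaN. Branching
// on vs first routes every such comparison to the false outcome, matching
// JavaScript, where e.g. (NaN < 1), (NaN == NaN) and (NaN > 1) are all
// false.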


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}


Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  __ LoadRoot(temp1, Heap::kNullValueRootIndex);
  __ cmp(input, temp1);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_JS_OBJECT_TYPE));
  return le;
}
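
// EmitIsObject answers true for null and for any non-smi, non-undetectable
// heap object whose instance type lies in
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]. That interval deliberately
// excludes functions, which sit above LAST_JS_OBJECT_TYPE, so the predicate
// roughly tracks (typeof v === 'object') semantics.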


void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = scratch0();
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ b(true_cond, &is_true);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Register result = ToRegister(instr->result());
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  Label done;
  __ b(eq, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}


static InstanceType TestType(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}
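
// TestType and BranchCondition together collapse an instance-type interval
// into a single compare. Illustrative cases:
//   from == to        -> cmp against that type, branch on eq;
//   [X, LAST_TYPE]    -> cmp against X, branch on hs (unsigned >= X);
//   [FIRST_TYPE, X]   -> cmp against X, branch on ls (unsigned <= X).
// Arbitrary interior intervals would need two compares and are not
// supported, hence the UNREACHABLE().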


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Label done;
  __ tst(input, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kFalseValueRootIndex, eq);
  __ b(eq, &done);
  __ CompareObjectType(input, result, result, TestType(instr->hydrogen()));
  Condition cond = BranchCondition(instr->hydrogen());
  __ LoadRoot(result, Heap::kTrueValueRootIndex, cond);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, NegateCondition(cond));
  __ bind(&done);
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, false_label);

  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq);
}


// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, is_false);
  __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ b(eq, is_true);
  } else {
    __ b(eq, is_false);
  }

  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}
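
// A rough JS-level sketch of the decision tree in EmitClassOfTest
// (illustrative, not normative):
//   smi or non-object           -> is_false
//   function                    -> class name is 'Function'
//   constructor not a function  -> class name is 'Object'
//   otherwise                   -> compare |class_name| against the
//                                  constructor's instance class name, both
//                                  symbols, by identity.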


void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Handle<String> class_name = instr->hydrogen()->class_name();

  Label done, is_true, is_false;

  EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
  __ b(ne, &is_false);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}


void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  __ mov(ip, Operand(factory()->the_hole_value()));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(factory()->the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not an instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values are not instances of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be r4
  // because its slot in the pushed safepoint register area is used to
  // communicate the offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}
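
// Patching sketch for the inline instanceof cache: the deferred code above
// writes into r4's safepoint slot the distance (in words) back to the
// |map_check| label, so the InstanceofStub can locate and rewrite the two
// movs of the hole value, turning them into a cached (map, boolean) pair.
// kAdditionalDelta covers the instructions emitted between
// |before_push_delta| and the stub call, and the constant pool is blocked
// over that stretch so the distance stays constant.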


static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}


void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  // The compare stub expects compare condition and the input operands
  // reversed for GT and LTE.
  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, condition);
}


void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}
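
// Epilogue shape emitted by DoReturn for a function with N parameters
// (kPointerSize == 4 on ARM):
//   mov  sp, fp                 ; discard everything below the frame header
//   ldm  ia_w, sp, {fp, lr}     ; restore caller's fp and the return address
//   add  sp, sp, #(N + 1) * 4   ; drop the N parameters plus the receiver
//   Jump(lr)                    ; return to caller
// The "+ 1" in sp_delta is the implicit receiver slot.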


void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  __ mov(r2, Operand(instr->name()));
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(context, instr->slot_index()));
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  __ str(value, ContextOperand(context, instr->slot_index()));
  if (instr->needs_write_barrier()) {
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWrite(context, Operand(offset), value, scratch0());
  }
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::EmitLoadField(Register result,
                             Register object,
                             Handle<Map> type,
                             Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
  int index = lookup.GetLocalFieldIndexFromMap(*type);
  int offset = index * kPointerSize;
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
  } else {
    // Non-negative property indices are in the properties array.
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
  }
}
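
// Worked example for EmitLoadField, assuming 32-bit pointers: a field index
// of -2 on a map with instance_size 16 gives offset -8 + 16 = 8, i.e. the
// in-object slot 8 bytes past the start of the object, while a field index
// of 3 loads the properties array and reads slot 3 past its header. Either
// way the load is a fixed one- or two-instruction sequence, which is what
// lets DoLoadNamedFieldPolymorphic below inline it per map.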


void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadField(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadField(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment());
      EmitLoadField(result, object, map, name);
    }
    __ bind(&done);
  }
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ sub(scratch, scratch, Operand(FIRST_EXTERNAL_ARRAY_TYPE));
    __ cmp(scratch, Operand(kExternalArrayTypeCount));
    __ Check(cc, "Check for fast elements failed.");
    __ bind(&done);
  }
}


void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ ldr(to_reg, FieldMemOperand(from_reg,
                                 ExternalArray::kExternalPointerOffset));
}


void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bail out if index is not a valid argument index. The unsigned check
  // gets the negative check for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting index from length accounts for one of them; add one more.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
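
// Index arithmetic above: the element at |index| is read from
// arguments + (length - index + 1) * kPointerSize. For length == 3 that puts
// index 2 (the last argument) two words above the frame pointer, just past
// the two header words, and index 0 four words up. After the flag-setting
// sub, ls holds exactly when index >= length unsigned, which also catches
// negative indices (seen as huge unsigned values), so those deoptimize.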


void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  ASSERT(result.is(elements));

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ cmp(result, scratch);
  DeoptimizeIf(eq, instr->environment());
}


void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  ExternalArrayType array_type = instr->array_type();
  if (array_type == kExternalFloatArray) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result(ToDoubleRegister(instr->result()));
    __ add(scratch0(), external_pointer, Operand(key, LSL, 2));
    __ vldr(result.low(), scratch0(), 0);
    __ vcvt_f64_f32(result, result.low());
  } else {
    Register result(ToRegister(instr->result()));
    switch (array_type) {
      case kExternalByteArray:
        __ ldrsb(result, MemOperand(external_pointer, key));
        break;
      case kExternalUnsignedByteArray:
      case kExternalPixelArray:
        __ ldrb(result, MemOperand(external_pointer, key));
        break;
      case kExternalShortArray:
        __ ldrsh(result, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalUnsignedShortArray:
        __ ldrh(result, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalIntArray:
        __ ldr(result, MemOperand(external_pointer, key, LSL, 2));
        break;
      case kExternalUnsignedIntArray:
        __ ldr(result, MemOperand(external_pointer, key, LSL, 2));
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case kExternalFloatArray:
        UNREACHABLE();
        break;
    }
  }
}
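
// Addressing pattern above: the key is scaled by the element size using the
// shifted-index form MemOperand(base, key, LSL, shift), with shift 0 for
// bytes, 1 for halfwords, and 2 for words and 32-bit floats; signed element
// kinds use the sign-extending loads ldrsb/ldrsh. The unsigned-int case has
// to deoptimize when bit 31 is set (cs after the cmp) because such values
// cannot be represented in the signed 32-bit result this instruction
// produces.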


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  Label done, adapted;
  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ mov(result, fp, LeaveCC, ne);
  __ mov(result, scratch, LeaveCC, eq);
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If there is no arguments adaptor frame, the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}


void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  Label global_object, receiver_ok;
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object.
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
  DeoptimizeIf(lo, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ bind(&receiver_ok);

  // Copy the arguments to this function, possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
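
// DoApplyArguments is the optimized path for f.apply(receiver, arguments).
// It deoptimizes rather than handle the rare cases inline: a smi or other
// non-object receiver (which would need wrapping) and argument counts above
// kArgumentsLimit. The copy loop reads the frame slots from index |length|
// down to 1, which pushes the first argument first, reproducing the normal
// calling convention before InvokeFunction runs.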
2646
2647
2648void LCodeGen::DoPushArgument(LPushArgument* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002649 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002650 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2651 Abort("DoPushArgument not implemented for double type.");
2652 } else {
2653 Register argument_reg = EmitLoadRegister(argument, ip);
2654 __ push(argument_reg);
2655 }
2656}
2657
2658
Steve Block1e0659c2011-05-24 12:43:12 +01002659void LCodeGen::DoContext(LContext* instr) {
2660 Register result = ToRegister(instr->result());
2661 __ mov(result, cp);
2662}
2663
2664
2665void LCodeGen::DoOuterContext(LOuterContext* instr) {
2666 Register context = ToRegister(instr->context());
2667 Register result = ToRegister(instr->result());
2668 __ ldr(result,
2669 MemOperand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2670 __ ldr(result, FieldMemOperand(result, JSFunction::kContextOffset));
2671}
2672
2673
Ben Murdochb0fe1622011-05-05 13:52:32 +01002674void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002675 Register context = ToRegister(instr->context());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002676 Register result = ToRegister(instr->result());
2677 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2678}
2679
2680
2681void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002682 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002683 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002684 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002685}
2686
2687
2688void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2689 int arity,
2690 LInstruction* instr) {
2691 // Change context if needed.
2692 bool change_context =
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002693 (info()->closure()->context() != function->context()) ||
Ben Murdochb0fe1622011-05-05 13:52:32 +01002694 scope()->contains_with() ||
2695 (scope()->num_heap_slots() > 0);
2696 if (change_context) {
2697 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
2698 }
2699
2700 // Set r0 to arguments count if adaption is not needed. Assumes that r0
2701 // is available to write to at this point.
2702 if (!function->NeedsArgumentsAdaption()) {
2703 __ mov(r0, Operand(arity));
2704 }
2705
2706 LPointerMap* pointers = instr->pointer_map();
2707 RecordPosition(pointers->position());
2708
2709 // Invoke function.
2710 __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
2711 __ Call(ip);
2712
2713 // Setup deoptimization.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002714 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002715
2716 // Restore context.
2717 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2718}
2719
2720
2721void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01002722 ASSERT(ToRegister(instr->result()).is(r0));
2723 __ mov(r1, Operand(instr->function()));
2724 CallKnownFunction(instr->function(), instr->arity(), instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002725}
2726
2727
2728void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002729 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2730 Register input = ToRegister(instr->InputAt(0));
2731 Register scratch = scratch0();
2732
2733 // Deoptimize if not a heap number.
2734 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2735 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
2736 __ cmp(scratch, Operand(ip));
2737 DeoptimizeIf(ne, instr->environment());
2738
2739 Label done;
2740 Register exponent = scratch0();
2741 scratch = no_reg;
2742 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
2743 // Check the sign of the argument. If the argument is positive, just
2744 // return it. We do not need to patch the stack since |input| and
2745 // |result| are the same register and |input| would be restored
2746 // unchanged by popping safepoint registers.
2747 __ tst(exponent, Operand(HeapNumber::kSignMask));
2748 __ b(eq, &done);
2749
2750 // Input is negative. Reverse its sign.
2751 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002752 {
2753 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01002754
Ben Murdoch8b112d22011-06-08 16:22:53 +01002755 // Registers were saved at the safepoint, so we can use
2756 // many scratch registers.
2757 Register tmp1 = input.is(r1) ? r0 : r1;
2758 Register tmp2 = input.is(r2) ? r0 : r2;
2759 Register tmp3 = input.is(r3) ? r0 : r3;
2760 Register tmp4 = input.is(r4) ? r0 : r4;
Steve Block1e0659c2011-05-24 12:43:12 +01002761
Ben Murdoch8b112d22011-06-08 16:22:53 +01002762 // exponent: floating point exponent value.
Steve Block1e0659c2011-05-24 12:43:12 +01002763
Ben Murdoch8b112d22011-06-08 16:22:53 +01002764 Label allocated, slow;
2765 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
2766 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
2767 __ b(&allocated);
Steve Block1e0659c2011-05-24 12:43:12 +01002768
Ben Murdoch8b112d22011-06-08 16:22:53 +01002769 // Slow case: Call the runtime system to do the number allocation.
2770 __ bind(&slow);
Steve Block1e0659c2011-05-24 12:43:12 +01002771
Ben Murdoch8b112d22011-06-08 16:22:53 +01002772 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
2773 // Set the pointer to the new heap number in tmp1.
2774 if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
2775 // Restore input_reg after call to runtime.
2776 __ LoadFromSafepointRegisterSlot(input, input);
2777 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002778
Ben Murdoch8b112d22011-06-08 16:22:53 +01002779 __ bind(&allocated);
2780 // exponent: floating point exponent value.
2781 // tmp1: allocated heap number.
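  // The absolute value only requires clearing the IEEE 754 sign bit:
  // copy the exponent word with the sign bit masked off and copy the
  // mantissa word unchanged.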
2782 __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
2783 __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
2784 __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
2785 __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002786
Ben Murdoch8b112d22011-06-08 16:22:53 +01002787 __ StoreToSafepointRegisterSlot(tmp1, input);
2788 }
Steve Block1e0659c2011-05-24 12:43:12 +01002789
2790 __ bind(&done);
2791}
2792
2793
2794void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2795 Register input = ToRegister(instr->InputAt(0));
2796 __ cmp(input, Operand(0));
2797 // The rsb can be made conditional because the preceding cmp clears the
2798 // V (overflow) flag, and the rsb is skipped (leaving V clear) when the
2799 // input is non-negative.
2800 __ rsb(input, input, Operand(0), SetCC, mi);
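  // For example, an input of -5 becomes 0 - (-5) = 5; the only input that
  // overflows is kMinInt (0x80000000), which sets V.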
2801 // Deoptimize on overflow.
2802 DeoptimizeIf(vs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002803}
2804
2805
2806void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002807 // Deferred code class for the tagged heap number case.
2808 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2809 public:
2810 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2811 LUnaryMathOperation* instr)
2812 : LDeferredCode(codegen), instr_(instr) { }
2813 virtual void Generate() {
2814 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2815 }
2816 private:
2817 LUnaryMathOperation* instr_;
2818 };
2819
2820 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2821 Representation r = instr->hydrogen()->value()->representation();
2822 if (r.IsDouble()) {
2823 DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
2824 __ vabs(input, input);
2825 } else if (r.IsInteger32()) {
2826 EmitIntegerMathAbs(instr);
2827 } else {
2828 // Representation is tagged.
2829 DeferredMathAbsTaggedHeapNumber* deferred =
2830 new DeferredMathAbsTaggedHeapNumber(this, instr);
2831 Register input = ToRegister(instr->InputAt(0));
2832 // Smi check.
2833 __ JumpIfNotSmi(input, deferred->entry());
2834 // If smi, handle it directly.
2835 EmitIntegerMathAbs(instr);
2836 __ bind(deferred->exit());
2837 }
2838}
2839
2840
Ben Murdochb0fe1622011-05-05 13:52:32 +01002841void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002842 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002843 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002844 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01002845 Register scratch1 = scratch0();
2846 Register scratch2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002847
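  // Truncate towards minus infinity (the semantics of Math.floor). The
  // flags are left ne if the result cannot be represented exactly as a
  // 32-bit integer, in which case we deoptimize below.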
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002848 __ EmitVFPTruncate(kRoundToMinusInf,
2849 single_scratch,
2850 input,
2851 scratch1,
2852 scratch2);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002853 DeoptimizeIf(ne, instr->environment());
2854
2855 // Move the result into the general purpose result register.
2856 __ vmov(result, single_scratch);
2857
Steve Block44f0eee2011-05-26 01:26:41 +01002858 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2859 // Test for -0.
2860 Label done;
2861 __ cmp(result, Operand(0));
2862 __ b(ne, &done);
2863 __ vmov(scratch1, input.high());
2864 __ tst(scratch1, Operand(HeapNumber::kSignMask));
2865 DeoptimizeIf(ne, instr->environment());
2866 __ bind(&done);
2867 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002868}
2869
2870
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002871void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2872 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
2873 Register result = ToRegister(instr->result());
Steve Block053d10c2011-06-13 19:13:29 +01002874 Register scratch1 = scratch0();
2875 Register scratch2 = result;
2876 __ EmitVFPTruncate(kRoundToNearest,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002877 double_scratch0().low(),
2878 input,
2879 scratch1,
2880 scratch2);
2881 DeoptimizeIf(ne, instr->environment());
2882 __ vmov(result, double_scratch0().low());
2883
Steve Block44f0eee2011-05-26 01:26:41 +01002884 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2885 // Test for -0.
Steve Block053d10c2011-06-13 19:13:29 +01002886 Label done;
Steve Block44f0eee2011-05-26 01:26:41 +01002887 __ cmp(result, Operand(0));
2888 __ b(ne, &done);
2889 __ vmov(scratch1, input.high());
2890 __ tst(scratch1, Operand(HeapNumber::kSignMask));
2891 DeoptimizeIf(ne, instr->environment());
Steve Block053d10c2011-06-13 19:13:29 +01002892 __ bind(&done);
Steve Block44f0eee2011-05-26 01:26:41 +01002893 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002894}
2895
2896
Ben Murdochb0fe1622011-05-05 13:52:32 +01002897void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002898 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002899 ASSERT(ToDoubleRegister(instr->result()).is(input));
2900 __ vsqrt(input, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002901}
2902
2903
Steve Block44f0eee2011-05-26 01:26:41 +01002904void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2905 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
2906 Register scratch = scratch0();
2907 SwVfpRegister single_scratch = double_scratch0().low();
2908 DoubleRegister double_scratch = double_scratch0();
2909 ASSERT(ToDoubleRegister(instr->result()).is(input));
2910
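  // Math.pow(x, 0.5) must return +0 for x == -0, but vsqrt(-0) yields -0,
  // so normalize the input first.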
2911 // Add +0 to convert -0 to +0.
2912 __ mov(scratch, Operand(0));
2913 __ vmov(single_scratch, scratch);
2914 __ vcvt_f64_s32(double_scratch, single_scratch);
2915 __ vadd(input, input, double_scratch);
2916 __ vsqrt(input, input);
2917}
2918
2919
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002920void LCodeGen::DoPower(LPower* instr) {
2921 LOperand* left = instr->InputAt(0);
2922 LOperand* right = instr->InputAt(1);
2923 Register scratch = scratch0();
2924 DoubleRegister result_reg = ToDoubleRegister(instr->result());
2925 Representation exponent_type = instr->hydrogen()->right()->representation();
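  // The exponent is either a double, an untagged int32, or a tagged value;
  // in every case the arguments are passed to a C helper in core registers
  // (doubles in the pairs r0:r1 and r2:r3, as in a soft-float ABI).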
2926 if (exponent_type.IsDouble()) {
2927 // Prepare arguments and call C function.
2928 __ PrepareCallCFunction(4, scratch);
2929 __ vmov(r0, r1, ToDoubleRegister(left));
2930 __ vmov(r2, r3, ToDoubleRegister(right));
Steve Block44f0eee2011-05-26 01:26:41 +01002931 __ CallCFunction(
2932 ExternalReference::power_double_double_function(isolate()), 4);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002933 } else if (exponent_type.IsInteger32()) {
2934 ASSERT(ToRegister(right).is(r0));
2935 // Prepare arguments and call C function.
2936 __ PrepareCallCFunction(4, scratch);
2937 __ mov(r2, ToRegister(right));
2938 __ vmov(r0, r1, ToDoubleRegister(left));
Steve Block44f0eee2011-05-26 01:26:41 +01002939 __ CallCFunction(
2940 ExternalReference::power_double_int_function(isolate()), 4);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002941 } else {
2942 ASSERT(exponent_type.IsTagged());
2943 ASSERT(instr->hydrogen()->left()->representation().IsDouble());
2944
2945 Register right_reg = ToRegister(right);
2946
2947 // Check for smi on the right hand side.
2948 Label non_smi, call;
2949 __ JumpIfNotSmi(right_reg, &non_smi);
2950
2951 // Untag smi and convert it to a double.
2952 __ SmiUntag(right_reg);
2953 SwVfpRegister single_scratch = double_scratch0().low();
2954 __ vmov(single_scratch, right_reg);
2955 __ vcvt_f64_s32(result_reg, single_scratch);
2956 __ jmp(&call);
2957
2958 // Heap number map check.
2959 __ bind(&non_smi);
2960 __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
2961 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
2962 __ cmp(scratch, Operand(ip));
2963 DeoptimizeIf(ne, instr->environment());
2964 int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
2965 __ add(scratch, right_reg, Operand(value_offset));
2966 __ vldr(result_reg, scratch, 0);
2967
2968 // Prepare arguments and call C function.
2969 __ bind(&call);
2970 __ PrepareCallCFunction(4, scratch);
2971 __ vmov(r0, r1, ToDoubleRegister(left));
2972 __ vmov(r2, r3, result_reg);
Steve Block44f0eee2011-05-26 01:26:41 +01002973 __ CallCFunction(
2974 ExternalReference::power_double_double_function(isolate()), 4);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002975 }
2976 // Store the result in the result register.
2977 __ GetCFunctionDoubleResult(result_reg);
2978}
2979
2980
2981void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2982 ASSERT(ToDoubleRegister(instr->result()).is(d2));
2983 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2984 TranscendentalCacheStub::UNTAGGED);
2985 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2986}
2987
2988
2989void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2990 ASSERT(ToDoubleRegister(instr->result()).is(d2));
2991 TranscendentalCacheStub stub(TranscendentalCache::COS,
2992 TranscendentalCacheStub::UNTAGGED);
2993 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2994}
2995
2996
2997void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2998 ASSERT(ToDoubleRegister(instr->result()).is(d2));
2999 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3000 TranscendentalCacheStub::UNTAGGED);
3001 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3002}
3003
3004
Ben Murdochb0fe1622011-05-05 13:52:32 +01003005void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
3006 switch (instr->op()) {
3007 case kMathAbs:
3008 DoMathAbs(instr);
3009 break;
3010 case kMathFloor:
3011 DoMathFloor(instr);
3012 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003013 case kMathRound:
3014 DoMathRound(instr);
3015 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003016 case kMathSqrt:
3017 DoMathSqrt(instr);
3018 break;
Steve Block44f0eee2011-05-26 01:26:41 +01003019 case kMathPowHalf:
3020 DoMathPowHalf(instr);
3021 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003022 case kMathCos:
3023 DoMathCos(instr);
3024 break;
3025 case kMathSin:
3026 DoMathSin(instr);
3027 break;
3028 case kMathLog:
3029 DoMathLog(instr);
3030 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003031 default:
3032 Abort("Unimplemented type of LUnaryMathOperation.");
3033 UNREACHABLE();
3034 }
3035}
3036
3037
3038void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003039 ASSERT(ToRegister(instr->result()).is(r0));
3040
3041 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003042 Handle<Code> ic =
3043 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003044 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3045 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003046}
3047
3048
3049void LCodeGen::DoCallNamed(LCallNamed* instr) {
3050 ASSERT(ToRegister(instr->result()).is(r0));
3051
3052 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003053 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
3054 arity, NOT_IN_LOOP);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003055 __ mov(r2, Operand(instr->name()));
3056 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3057 // Restore context register.
3058 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3059}
3060
3061
3062void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003063 ASSERT(ToRegister(instr->result()).is(r0));
3064
3065 int arity = instr->arity();
3066 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
3067 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3068 __ Drop(1);
3069 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003070}
3071
3072
3073void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003074 ASSERT(ToRegister(instr->result()).is(r0));
3075
3076 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01003077 Handle<Code> ic =
3078 isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003079 __ mov(r2, Operand(instr->name()));
3080 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
3081 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003082}
3083
3084
3085void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
3086 ASSERT(ToRegister(instr->result()).is(r0));
3087 __ mov(r1, Operand(instr->target()));
3088 CallKnownFunction(instr->target(), instr->arity(), instr);
3089}
3090
3091
3092void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003093 ASSERT(ToRegister(instr->InputAt(0)).is(r1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003094 ASSERT(ToRegister(instr->result()).is(r0));
3095
Steve Block44f0eee2011-05-26 01:26:41 +01003096 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003097 __ mov(r0, Operand(instr->arity()));
3098 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
3099}
3100
3101
3102void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3103 CallRuntime(instr->function(), instr->arity(), instr);
3104}
3105
3106
3107void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003108 Register object = ToRegister(instr->object());
3109 Register value = ToRegister(instr->value());
3110 Register scratch = scratch0();
3111 int offset = instr->offset();
3112
3113 ASSERT(!object.is(value));
3114
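  // If this store is accompanied by a map transition, write the new map
  // into the object before storing the field itself.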
3115 if (!instr->transition().is_null()) {
3116 __ mov(scratch, Operand(instr->transition()));
3117 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
3118 }
3119
3120 // Do the store.
3121 if (instr->is_in_object()) {
3122 __ str(value, FieldMemOperand(object, offset));
3123 if (instr->needs_write_barrier()) {
3124 // Update the write barrier for the object for in-object properties.
3125 __ RecordWrite(object, Operand(offset), value, scratch);
3126 }
3127 } else {
3128 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
3129 __ str(value, FieldMemOperand(scratch, offset));
3130 if (instr->needs_write_barrier()) {
3131 // Update the write barrier for the properties array.
3132 // object is used as a scratch register.
3133 __ RecordWrite(scratch, Operand(offset), value, object);
3134 }
3135 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003136}
3137
3138
3139void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3140 ASSERT(ToRegister(instr->object()).is(r1));
3141 ASSERT(ToRegister(instr->value()).is(r0));
3142
3143 // Name is always in r2.
3144 __ mov(r2, Operand(instr->name()));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003145 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003146 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3147 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003148 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3149}
3150
3151
3152void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003153 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
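  // The unsigned comparison (hs) also rejects negative indices, since they
  // compare as large unsigned values.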
Steve Block9fac8402011-05-12 15:51:54 +01003154 DeoptimizeIf(hs, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003155}
3156
3157
3158void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003159 Register value = ToRegister(instr->value());
3160 Register elements = ToRegister(instr->object());
3161 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3162 Register scratch = scratch0();
3163
3164 // Do the store.
3165 if (instr->key()->IsConstantOperand()) {
3166 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3167 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3168 int offset =
3169 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
3170 __ str(value, FieldMemOperand(elements, offset));
3171 } else {
3172 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
3173 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
3174 }
3175
3176 if (instr->hydrogen()->NeedsWriteBarrier()) {
3177 // Compute address of modified element and store it into key register.
3178 __ add(key, scratch, Operand(FixedArray::kHeaderSize));
3179 __ RecordWrite(elements, key, value);
3180 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003181}
3182
3183
Steve Block44f0eee2011-05-26 01:26:41 +01003184void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3185 LStoreKeyedSpecializedArrayElement* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003186
3187 Register external_pointer = ToRegister(instr->external_pointer());
3188 Register key = ToRegister(instr->key());
Ben Murdoch8b112d22011-06-08 16:22:53 +01003189 ExternalArrayType array_type = instr->array_type();
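  // The element address is external_pointer + (key << log2(element size)):
  // no shift for byte arrays, LSL 1 for shorts, LSL 2 for ints and floats.
  // Float arrays hold single precision values, so doubles are narrowed
  // with vcvt_f32_f64 before the store.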
3190 if (array_type == kExternalFloatArray) {
3191 CpuFeatures::Scope scope(VFP3);
3192 DwVfpRegister value(ToDoubleRegister(instr->value()));
3193 __ add(scratch0(), external_pointer, Operand(key, LSL, 2));
3194 __ vcvt_f32_f64(double_scratch0().low(), value);
3195 __ vstr(double_scratch0().low(), scratch0(), 0);
3196 } else {
3197 Register value(ToRegister(instr->value()));
3198 switch (array_type) {
3199 case kExternalPixelArray:
3200 // Clamp the value to [0..255].
3201 __ Usat(value, 8, Operand(value));
3202 __ strb(value, MemOperand(external_pointer, key));
3203 break;
3204 case kExternalByteArray:
3205 case kExternalUnsignedByteArray:
3206 __ strb(value, MemOperand(external_pointer, key));
3207 break;
3208 case kExternalShortArray:
3209 case kExternalUnsignedShortArray:
3210 __ strh(value, MemOperand(external_pointer, key, LSL, 1));
3211 break;
3212 case kExternalIntArray:
3213 case kExternalUnsignedIntArray:
3214 __ str(value, MemOperand(external_pointer, key, LSL, 2));
3215 break;
3216 case kExternalFloatArray:
3217 UNREACHABLE();
3218 break;
3219 }
3220 }
Steve Block44f0eee2011-05-26 01:26:41 +01003221}
3222
3223
Ben Murdochb0fe1622011-05-05 13:52:32 +01003224void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3225 ASSERT(ToRegister(instr->object()).is(r2));
3226 ASSERT(ToRegister(instr->key()).is(r1));
3227 ASSERT(ToRegister(instr->value()).is(r0));
3228
Ben Murdoch8b112d22011-06-08 16:22:53 +01003229 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003230 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3231 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003232 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3233}
3234
3235
Steve Block1e0659c2011-05-24 12:43:12 +01003236void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3237 class DeferredStringCharCodeAt: public LDeferredCode {
3238 public:
3239 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3240 : LDeferredCode(codegen), instr_(instr) { }
3241 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3242 private:
3243 LStringCharCodeAt* instr_;
3244 };
3245
3246 Register scratch = scratch0();
3247 Register string = ToRegister(instr->string());
3248 Register index = no_reg;
3249 int const_index = -1;
3250 if (instr->index()->IsConstantOperand()) {
3251 const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3252 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3253 if (!Smi::IsValid(const_index)) {
3254 // Guaranteed to be out of bounds because of the assert above,
3255 // so the bounds check that dominates this instruction must have
3256 // deoptimized already.
3257 if (FLAG_debug_code) {
3258 __ Abort("StringCharCodeAt: out of bounds index.");
3259 }
3260 // No code needs to be generated.
3261 return;
3262 }
3263 } else {
3264 index = ToRegister(instr->index());
3265 }
3266 Register result = ToRegister(instr->result());
3267
3268 DeferredStringCharCodeAt* deferred =
3269 new DeferredStringCharCodeAt(this, instr);
3270
3271 Label flat_string, ascii_string, done;
3272
3273 // Fetch the instance type of the receiver into result register.
3274 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3275 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3276
3277 // We need special handling for non-flat strings.
3278 STATIC_ASSERT(kSeqStringTag == 0);
3279 __ tst(result, Operand(kStringRepresentationMask));
3280 __ b(eq, &flat_string);
3281
3282 // Handle non-flat strings.
3283 __ tst(result, Operand(kIsConsStringMask));
3284 __ b(eq, deferred->entry());
3285
3286 // ConsString.
3287 // Check whether the right hand side is the empty string (i.e. if
3288 // this is really a flat string in a cons string). If that is not
3289 // the case we would rather go to the runtime system now to flatten
3290 // the string.
3291 __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
3292 __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
3293 __ cmp(scratch, ip);
3294 __ b(ne, deferred->entry());
3295 // Get the first of the two strings and load its instance type.
3296 __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
3297 __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
3298 __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
3299 // If the first cons component is also non-flat, then go to runtime.
3300 STATIC_ASSERT(kSeqStringTag == 0);
3301 __ tst(result, Operand(kStringRepresentationMask));
3302 __ b(ne, deferred->entry());
3303
3304 // Check for 1-byte or 2-byte string.
3305 __ bind(&flat_string);
3306 STATIC_ASSERT(kAsciiStringTag != 0);
3307 __ tst(result, Operand(kStringEncodingMask));
3308 __ b(ne, &ascii_string);
3309
3310 // 2-byte string.
3311 // Load the 2-byte character code into the result register.
3312 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3313 if (instr->index()->IsConstantOperand()) {
3314 __ ldrh(result,
3315 FieldMemOperand(string,
3316 SeqTwoByteString::kHeaderSize + 2 * const_index));
3317 } else {
3318 __ add(scratch,
3319 string,
3320 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3321 __ ldrh(result, MemOperand(scratch, index, LSL, 1));
3322 }
3323 __ jmp(&done);
3324
3325 // ASCII string.
3326 // Load the byte into the result register.
3327 __ bind(&ascii_string);
3328 if (instr->index()->IsConstantOperand()) {
3329 __ ldrb(result, FieldMemOperand(string,
3330 SeqAsciiString::kHeaderSize + const_index));
3331 } else {
3332 __ add(scratch,
3333 string,
3334 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3335 __ ldrb(result, MemOperand(scratch, index));
3336 }
3337 __ bind(&done);
3338 __ bind(deferred->exit());
3339}
3340
3341
3342void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3343 Register string = ToRegister(instr->string());
3344 Register result = ToRegister(instr->result());
3345 Register scratch = scratch0();
3346
3347 // TODO(3095996): Get rid of this. For now, we need to make the
3348 // result register contain a valid pointer because it is already
3349 // contained in the register pointer map.
3350 __ mov(result, Operand(0));
3351
Ben Murdoch8b112d22011-06-08 16:22:53 +01003352 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block1e0659c2011-05-24 12:43:12 +01003353 __ push(string);
3354 // Push the index as a smi. This is safe because of the checks in
3355 // DoStringCharCodeAt above.
3356 if (instr->index()->IsConstantOperand()) {
3357 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3358 __ mov(scratch, Operand(Smi::FromInt(const_index)));
3359 __ push(scratch);
3360 } else {
3361 Register index = ToRegister(instr->index());
3362 __ SmiTag(index);
3363 __ push(index);
3364 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003365 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
Steve Block1e0659c2011-05-24 12:43:12 +01003366 if (FLAG_debug_code) {
3367 __ AbortIfNotSmi(r0);
3368 }
3369 __ SmiUntag(r0);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003370 __ StoreToSafepointRegisterSlot(r0, result);
Steve Block1e0659c2011-05-24 12:43:12 +01003371}
3372
3373
Steve Block44f0eee2011-05-26 01:26:41 +01003374void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3375 class DeferredStringCharFromCode: public LDeferredCode {
3376 public:
3377 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3378 : LDeferredCode(codegen), instr_(instr) { }
3379 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3380 private:
3381 LStringCharFromCode* instr_;
3382 };
3383
3384 DeferredStringCharFromCode* deferred =
3385 new DeferredStringCharFromCode(this, instr);
3386
3387 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3388 Register char_code = ToRegister(instr->char_code());
3389 Register result = ToRegister(instr->result());
3390 ASSERT(!char_code.is(result));
3391
3392 __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
3393 __ b(hi, deferred->entry());
3394 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
3395 __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
3396 __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
3397 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3398 __ cmp(result, ip);
3399 __ b(eq, deferred->entry());
3400 __ bind(deferred->exit());
3401}
3402
3403
3404void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3405 Register char_code = ToRegister(instr->char_code());
3406 Register result = ToRegister(instr->result());
3407
3408 // TODO(3095996): Get rid of this. For now, we need to make the
3409 // result register contain a valid pointer because it is already
3410 // contained in the register pointer map.
3411 __ mov(result, Operand(0));
3412
Ben Murdoch8b112d22011-06-08 16:22:53 +01003413 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Steve Block44f0eee2011-05-26 01:26:41 +01003414 __ SmiTag(char_code);
3415 __ push(char_code);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003416 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
Steve Block44f0eee2011-05-26 01:26:41 +01003417 __ StoreToSafepointRegisterSlot(r0, result);
Steve Block44f0eee2011-05-26 01:26:41 +01003418}
3419
3420
Steve Block1e0659c2011-05-24 12:43:12 +01003421void LCodeGen::DoStringLength(LStringLength* instr) {
3422 Register string = ToRegister(instr->InputAt(0));
3423 Register result = ToRegister(instr->result());
3424 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3425}
3426
3427
Ben Murdochb0fe1622011-05-05 13:52:32 +01003428void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003429 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003430 ASSERT(input->IsRegister() || input->IsStackSlot());
3431 LOperand* output = instr->result();
3432 ASSERT(output->IsDoubleRegister());
3433 SwVfpRegister single_scratch = double_scratch0().low();
3434 if (input->IsStackSlot()) {
3435 Register scratch = scratch0();
3436 __ ldr(scratch, ToMemOperand(input));
3437 __ vmov(single_scratch, scratch);
3438 } else {
3439 __ vmov(single_scratch, ToRegister(input));
3440 }
3441 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003442}
3443
3444
3445void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3446 class DeferredNumberTagI: public LDeferredCode {
3447 public:
3448 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3449 : LDeferredCode(codegen), instr_(instr) { }
3450 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3451 private:
3452 LNumberTagI* instr_;
3453 };
3454
Steve Block1e0659c2011-05-24 12:43:12 +01003455 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003456 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3457 Register reg = ToRegister(input);
3458
3459 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
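  // SmiTag doubles the value, so the overflow (vs) branch catches inputs
  // that do not fit in the 31-bit smi range.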
3460 __ SmiTag(reg, SetCC);
3461 __ b(vs, deferred->entry());
3462 __ bind(deferred->exit());
3463}
3464
3465
3466void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3467 Label slow;
Steve Block1e0659c2011-05-24 12:43:12 +01003468 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003469 DoubleRegister dbl_scratch = d0;
3470 SwVfpRegister flt_scratch = s0;
3471
3472 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01003473 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003474
3475 // There was overflow, so bits 30 and 31 of the original integer
3476 // disagree. Try to allocate a heap number in new space and store
3477 // the value in there. If that fails, call the runtime system.
3478 Label done;
3479 __ SmiUntag(reg);
3480 __ eor(reg, reg, Operand(0x80000000));
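  // SmiTag overflowed, so untagging has flipped the sign bit of the
  // original value; the eor with 0x80000000 restores it.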
3481 __ vmov(flt_scratch, reg);
3482 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
3483 if (FLAG_inline_new) {
3484 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3485 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3486 if (!reg.is(r5)) __ mov(reg, r5);
3487 __ b(&done);
3488 }
3489
3490 // Slow case: Call the runtime system to do the number allocation.
3491 __ bind(&slow);
3492
3493 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3494 // register is stored, as this register is in the pointer map, but contains an
3495 // integer value.
3496 __ mov(ip, Operand(0));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003497 __ StoreToSafepointRegisterSlot(ip, reg);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003498 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003499 if (!reg.is(r0)) __ mov(reg, r0);
3500
3501 // Done. Store the value from dbl_scratch into the allocated heap
3502 // number.
3503 __ bind(&done);
3504 __ sub(ip, reg, Operand(kHeapObjectTag));
3505 __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003506 __ StoreToSafepointRegisterSlot(reg, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003507}
3508
3509
3510void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3511 class DeferredNumberTagD: public LDeferredCode {
3512 public:
3513 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3514 : LDeferredCode(codegen), instr_(instr) { }
3515 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3516 private:
3517 LNumberTagD* instr_;
3518 };
3519
Steve Block1e0659c2011-05-24 12:43:12 +01003520 DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01003521 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003522 Register reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003523 Register temp1 = ToRegister(instr->TempAt(0));
3524 Register temp2 = ToRegister(instr->TempAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003525
3526 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3527 if (FLAG_inline_new) {
3528 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
3529 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
3530 } else {
3531 __ jmp(deferred->entry());
3532 }
3533 __ bind(deferred->exit());
3534 __ sub(ip, reg, Operand(kHeapObjectTag));
3535 __ vstr(input_reg, ip, HeapNumber::kValueOffset);
3536}
3537
3538
3539void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3540 // TODO(3095996): Get rid of this. For now, we need to make the
3541 // result register contain a valid pointer because it is already
3542 // contained in the register pointer map.
3543 Register reg = ToRegister(instr->result());
3544 __ mov(reg, Operand(0));
3545
Ben Murdoch8b112d22011-06-08 16:22:53 +01003546 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
3547 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003548 __ StoreToSafepointRegisterSlot(r0, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003549}
3550
3551
3552void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003553 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003554 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3555 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3556 __ SmiTag(ToRegister(input));
3557}
3558
3559
3560void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003561 LOperand* input = instr->InputAt(0);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003562 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3563 if (instr->needs_check()) {
3564 __ tst(ToRegister(input), Operand(kSmiTagMask));
3565 DeoptimizeIf(ne, instr->environment());
3566 }
3567 __ SmiUntag(ToRegister(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003568}
3569
3570
3571void LCodeGen::EmitNumberUntagD(Register input_reg,
3572 DoubleRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003573 bool deoptimize_on_undefined,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003574 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01003575 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003576 SwVfpRegister flt_scratch = s0;
3577 ASSERT(!result_reg.is(d0));
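  // flt_scratch (s0) aliases the low half of d0, so the result register
  // must not be d0.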
3578
3579 Label load_smi, heap_number, done;
3580
3581 // Smi check.
3582 __ tst(input_reg, Operand(kSmiTagMask));
3583 __ b(eq, &load_smi);
3584
3585 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01003586 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003587 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01003588 __ cmp(scratch, Operand(ip));
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003589 if (deoptimize_on_undefined) {
3590 DeoptimizeIf(ne, env);
3591 } else {
3592 Label heap_number;
3593 __ b(eq, &heap_number);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003594
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003595 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3596 __ cmp(input_reg, Operand(ip));
3597 DeoptimizeIf(ne, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003598
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003599 // Convert undefined to NaN.
3600 __ LoadRoot(ip, Heap::kNanValueRootIndex);
3601 __ sub(ip, ip, Operand(kHeapObjectTag));
3602 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3603 __ jmp(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003604
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003605 __ bind(&heap_number);
3606 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003607 // Heap number to double register conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003608 __ sub(ip, input_reg, Operand(kHeapObjectTag));
3609 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3610 __ jmp(&done);
3611
3612 // Smi to double register conversion.
3613 __ bind(&load_smi);
3614 __ SmiUntag(input_reg); // Untag smi before converting to float.
3615 __ vmov(flt_scratch, input_reg);
3616 __ vcvt_f64_s32(result_reg, flt_scratch);
3617 __ SmiTag(input_reg); // Retag smi.
3618 __ bind(&done);
3619}
3620
3621
3622class DeferredTaggedToI: public LDeferredCode {
3623 public:
3624 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3625 : LDeferredCode(codegen), instr_(instr) { }
3626 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3627 private:
3628 LTaggedToI* instr_;
3629};
3630
3631
3632void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003633 Register input_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003634 Register scratch1 = scratch0();
3635 Register scratch2 = ToRegister(instr->TempAt(0));
3636 DwVfpRegister double_scratch = double_scratch0();
3637 SwVfpRegister single_scratch = double_scratch.low();
3638
3639 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
3640 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));
3641
3642 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003643
3644 // Heap number map check.
Steve Block44f0eee2011-05-26 01:26:41 +01003645 __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003646 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block44f0eee2011-05-26 01:26:41 +01003647 __ cmp(scratch1, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003648
3649 if (instr->truncating()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003650 Register scratch3 = ToRegister(instr->TempAt(1));
3651 DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
3652 ASSERT(!scratch3.is(input_reg) &&
3653 !scratch3.is(scratch1) &&
3654 !scratch3.is(scratch2));
3655 // Performs a truncating conversion of a floating point number as used by
3656 // the JS bitwise operations.
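  // This implements ToInt32 as used by the bitwise operators, e.g. (x | 0):
  // undefined becomes zero and any heap number is truncated without
  // deoptimizing.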
Ben Murdochb0fe1622011-05-05 13:52:32 +01003657 Label heap_number;
3658 __ b(eq, &heap_number);
3659 // Check for undefined. Undefined is converted to zero for truncating
3660 // conversions.
3661 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3662 __ cmp(input_reg, Operand(ip));
3663 DeoptimizeIf(ne, instr->environment());
3664 __ mov(input_reg, Operand(0));
3665 __ b(&done);
3666
3667 __ bind(&heap_number);
Steve Block44f0eee2011-05-26 01:26:41 +01003668 __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
3669 __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);
3670
3671 __ EmitECMATruncate(input_reg,
3672 double_scratch2,
3673 single_scratch,
3674 scratch1,
3675 scratch2,
3676 scratch3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003677
3678 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003679 CpuFeatures::Scope scope(VFP3);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003680 // Deoptimize if we don't have a heap number.
3681 DeoptimizeIf(ne, instr->environment());
3682
3683 __ sub(ip, input_reg, Operand(kHeapObjectTag));
Steve Block44f0eee2011-05-26 01:26:41 +01003684 __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
3685 __ EmitVFPTruncate(kRoundToZero,
3686 single_scratch,
3687 double_scratch,
3688 scratch1,
3689 scratch2,
3690 kCheckForInexactConversion);
3691 DeoptimizeIf(ne, instr->environment());
3692 // Load the result.
3693 __ vmov(input_reg, single_scratch);
3694
Ben Murdochb0fe1622011-05-05 13:52:32 +01003695 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Steve Block44f0eee2011-05-26 01:26:41 +01003696 __ cmp(input_reg, Operand(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003697 __ b(ne, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01003698 __ vmov(scratch1, double_scratch.high());
3699 __ tst(scratch1, Operand(HeapNumber::kSignMask));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003700 DeoptimizeIf(ne, instr->environment());
3701 }
3702 }
3703 __ bind(&done);
3704}
3705
3706
3707void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003708 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003709 ASSERT(input->IsRegister());
3710 ASSERT(input->Equals(instr->result()));
3711
3712 Register input_reg = ToRegister(input);
3713
3714 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3715
3716 // Smi check.
3717 __ tst(input_reg, Operand(kSmiTagMask));
3718 __ b(ne, deferred->entry());
3719
3720 // Smi to int32 conversion.
3721 __ SmiUntag(input_reg); // Untag smi.
3722
3723 __ bind(deferred->exit());
3724}
3725
3726
3727void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003728 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003729 ASSERT(input->IsRegister());
3730 LOperand* result = instr->result();
3731 ASSERT(result->IsDoubleRegister());
3732
3733 Register input_reg = ToRegister(input);
3734 DoubleRegister result_reg = ToDoubleRegister(result);
3735
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003736 EmitNumberUntagD(input_reg, result_reg,
3737 instr->hydrogen()->deoptimize_on_undefined(),
3738 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003739}
3740
3741
3742void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003743 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003744 Register scratch1 = scratch0();
3745 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003746 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
3747 DwVfpRegister double_scratch = double_scratch0();
3748 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01003749
Steve Block44f0eee2011-05-26 01:26:41 +01003750 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01003751
Steve Block44f0eee2011-05-26 01:26:41 +01003752 if (instr->truncating()) {
3753 Register scratch3 = ToRegister(instr->TempAt(1));
3754 __ EmitECMATruncate(result_reg,
3755 double_input,
3756 single_scratch,
3757 scratch1,
3758 scratch2,
3759 scratch3);
3760 } else {
3761 VFPRoundingMode rounding_mode = kRoundToMinusInf;
3762 __ EmitVFPTruncate(rounding_mode,
3763 single_scratch,
3764 double_input,
3765 scratch1,
3766 scratch2,
3767 kCheckForInexactConversion);
3768 // Deoptimize on a VFP invalid operation exception or an inexact
3769 // conversion.
Steve Block1e0659c2011-05-24 12:43:12 +01003770 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003771 // Retrieve the result.
3772 __ vmov(result_reg, single_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01003773 }
Steve Block44f0eee2011-05-26 01:26:41 +01003774 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003775}
3776
3777
3778void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003779 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003780 __ tst(ToRegister(input), Operand(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003781 DeoptimizeIf(ne, instr->environment());
3782}
3783
3784
3785void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
3786 LOperand* input = instr->InputAt(0);
3787 __ tst(ToRegister(input), Operand(kSmiTagMask));
3788 DeoptimizeIf(eq, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003789}
3790
3791
3792void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003793 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003794 Register scratch = scratch0();
3795 InstanceType first = instr->hydrogen()->first();
3796 InstanceType last = instr->hydrogen()->last();
3797
3798 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3799 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3800 __ cmp(scratch, Operand(first));
3801
3802 // If there is only one type in the interval check for equality.
3803 if (first == last) {
3804 DeoptimizeIf(ne, instr->environment());
3805 } else {
3806 DeoptimizeIf(lo, instr->environment());
3807 // Omit check for the last type.
3808 if (last != LAST_TYPE) {
3809 __ cmp(scratch, Operand(last));
3810 DeoptimizeIf(hi, instr->environment());
3811 }
3812 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003813}
3814
3815
3816void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003817 ASSERT(instr->InputAt(0)->IsRegister());
3818 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003819 __ cmp(reg, Operand(instr->hydrogen()->target()));
3820 DeoptimizeIf(ne, instr->environment());
3821}
3822
3823
3824void LCodeGen::DoCheckMap(LCheckMap* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003825 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01003826 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003827 ASSERT(input->IsRegister());
3828 Register reg = ToRegister(input);
Steve Block9fac8402011-05-12 15:51:54 +01003829 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
3830 __ cmp(scratch, Operand(instr->hydrogen()->map()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003831 DeoptimizeIf(ne, instr->environment());
3832}
3833
3834
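// Objects in new space may move on every scavenge, so they are referenced
// indirectly through a global property cell instead of being embedded in
// the code; old space objects can be referenced directly.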
Ben Murdochb8e0da22011-05-16 14:20:40 +01003835void LCodeGen::LoadHeapObject(Register result,
3836 Handle<HeapObject> object) {
Steve Block44f0eee2011-05-26 01:26:41 +01003837 if (heap()->InNewSpace(*object)) {
Steve Block9fac8402011-05-12 15:51:54 +01003838 Handle<JSGlobalPropertyCell> cell =
Steve Block44f0eee2011-05-26 01:26:41 +01003839 factory()->NewJSGlobalPropertyCell(object);
Steve Block9fac8402011-05-12 15:51:54 +01003840 __ mov(result, Operand(cell));
Ben Murdochb8e0da22011-05-16 14:20:40 +01003841 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
Steve Block9fac8402011-05-12 15:51:54 +01003842 } else {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003843 __ mov(result, Operand(object));
Steve Block9fac8402011-05-12 15:51:54 +01003844 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003845}
3846
3847
3848void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003849 Register temp1 = ToRegister(instr->TempAt(0));
3850 Register temp2 = ToRegister(instr->TempAt(1));
Steve Block9fac8402011-05-12 15:51:54 +01003851
3852 Handle<JSObject> holder = instr->holder();
Ben Murdochb8e0da22011-05-16 14:20:40 +01003853 Handle<JSObject> current_prototype = instr->prototype();
Steve Block9fac8402011-05-12 15:51:54 +01003854
3855 // Load prototype object.
Ben Murdochb8e0da22011-05-16 14:20:40 +01003856 LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01003857
3858 // Check prototype maps up to the holder.
3859 while (!current_prototype.is_identical_to(holder)) {
3860 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
3861 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
3862 DeoptimizeIf(ne, instr->environment());
3863 current_prototype =
3864 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3865 // Load next prototype object.
Ben Murdochb8e0da22011-05-16 14:20:40 +01003866 LoadHeapObject(temp1, current_prototype);
Steve Block9fac8402011-05-12 15:51:54 +01003867 }
3868
3869 // Check the holder map.
3870 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
3871 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
3872 DeoptimizeIf(ne, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003873}
3874
3875
3876void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003877 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3878 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
3879 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
3880 __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
3881 __ Push(r3, r2, r1);
3882
3883 // Pick the right runtime function or stub to call.
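  // Shallow literals within the stub's length limit are cloned by
  // FastCloneShallowArrayStub (with a copy-on-write mode for COW element
  // backing stores); nested or oversized literals go to the runtime.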
3884 int length = instr->hydrogen()->length();
3885 if (instr->hydrogen()->IsCopyOnWrite()) {
3886 ASSERT(instr->hydrogen()->depth() == 1);
3887 FastCloneShallowArrayStub::Mode mode =
3888 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3889 FastCloneShallowArrayStub stub(mode, length);
3890 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3891 } else if (instr->hydrogen()->depth() > 1) {
3892 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
3893 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3894 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
3895 } else {
3896 FastCloneShallowArrayStub::Mode mode =
3897 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3898 FastCloneShallowArrayStub stub(mode, length);
3899 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3900 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003901}
3902
3903
3904void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003905 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3906 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
3907 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
3908 __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
3909 __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3910 __ Push(r4, r3, r2, r1);
3911
3912 // Pick the right runtime function to call.
3913 if (instr->hydrogen()->depth() > 1) {
3914 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3915 } else {
3916 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3917 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003918}
3919
3920
Steve Block44f0eee2011-05-26 01:26:41 +01003921void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
3922 ASSERT(ToRegister(instr->InputAt(0)).is(r0));
3923 __ push(r0);
3924 CallRuntime(Runtime::kToFastProperties, 1, instr);
3925}
3926
3927
Ben Murdochb0fe1622011-05-05 13:52:32 +01003928void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003929 Label materialized;
3930 // Registers will be used as follows:
3931 // r3 = JS function.
3932 // r7 = literals array.
3933 // r1 = regexp literal.
3934 // r0 = regexp literal clone.
3935 // r2 and r4-r6 are used as temporaries.
3936 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3937 __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
3938 int literal_offset = FixedArray::kHeaderSize +
3939 instr->hydrogen()->literal_index() * kPointerSize;
3940 __ ldr(r1, FieldMemOperand(r7, literal_offset));
3941 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3942 __ cmp(r1, ip);
3943 __ b(ne, &materialized);
3944
3945 // Create the regexp literal using the runtime function.
3946 // The result will be in r0.
3947 __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
3948 __ mov(r5, Operand(instr->hydrogen()->pattern()));
3949 __ mov(r4, Operand(instr->hydrogen()->flags()));
3950 __ Push(r7, r6, r5, r4);
3951 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3952 __ mov(r1, r0);
3953
3954 __ bind(&materialized);
3955 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3956 Label allocated, runtime_allocate;
3957
3958 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
3959 __ jmp(&allocated);
3960
3961 __ bind(&runtime_allocate);
3962 __ mov(r0, Operand(Smi::FromInt(size)));
3963 __ Push(r1, r0);
3964 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3965 __ pop(r1);
3966
3967 __ bind(&allocated);
3968 // Copy the content into the newly allocated memory.
3969 // (Unroll copy loop once for better throughput).
3970 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3971 __ ldr(r3, FieldMemOperand(r1, i));
3972 __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
3973 __ str(r3, FieldMemOperand(r0, i));
3974 __ str(r2, FieldMemOperand(r0, i + kPointerSize));
3975 }
3976 if ((size % (2 * kPointerSize)) != 0) {
3977 __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
3978 __ str(r3, FieldMemOperand(r0, size - kPointerSize));
3979 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003980}
3981
3982
3983void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003984 // Use the fast case closure allocation code that allocates in new
3985 // space for nested functions that don't need literals cloning.
3986 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
Steve Block1e0659c2011-05-24 12:43:12 +01003987 bool pretenure = instr->hydrogen()->pretenure();
Steve Block44f0eee2011-05-26 01:26:41 +01003988 if (!pretenure && shared_info->num_literals() == 0) {
3989 FastNewClosureStub stub(
3990 shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003991 __ mov(r1, Operand(shared_info));
3992 __ push(r1);
3993 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3994 } else {
3995 __ mov(r2, Operand(shared_info));
3996 __ mov(r1, Operand(pretenure
Steve Block44f0eee2011-05-26 01:26:41 +01003997 ? factory()->true_value()
3998 : factory()->false_value()));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003999 __ Push(cp, r2, r1);
4000 CallRuntime(Runtime::kNewClosure, 3, instr);
4001 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004002}
4003
4004
4005void LCodeGen::DoTypeof(LTypeof* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004006 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004007 __ push(input);
4008 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004009}
4010
4011
4012void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004013 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004014 Register result = ToRegister(instr->result());
4015 Label true_label;
4016 Label false_label;
4017 Label done;
4018
4019 Condition final_branch_condition = EmitTypeofIs(&true_label,
4020 &false_label,
4021 input,
4022 instr->type_literal());
4023 __ b(final_branch_condition, &true_label);
4024 __ bind(&false_label);
4025 __ LoadRoot(result, Heap::kFalseValueRootIndex);
4026 __ b(&done);
4027
4028 __ bind(&true_label);
4029 __ LoadRoot(result, Heap::kTrueValueRootIndex);
4030
4031 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004032}
4033
4034
4035void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004036 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004037 int true_block = chunk_->LookupDestination(instr->true_block_id());
4038 int false_block = chunk_->LookupDestination(instr->false_block_id());
4039 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4040 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4041
4042 Condition final_branch_condition = EmitTypeofIs(true_label,
4043 false_label,
4044 input,
4045 instr->type_literal());
4046
4047 EmitBranch(true_block, false_block, final_branch_condition);
4048}
4049
4050
4051Condition LCodeGen::EmitTypeofIs(Label* true_label,
4052 Label* false_label,
4053 Register input,
4054 Handle<String> type_name) {
Steve Block1e0659c2011-05-24 12:43:12 +01004055 Condition final_branch_condition = kNoCondition;
Steve Block9fac8402011-05-12 15:51:54 +01004056 Register scratch = scratch0();
Steve Block44f0eee2011-05-26 01:26:41 +01004057 if (type_name->Equals(heap()->number_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004058 __ JumpIfSmi(input, true_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004059 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
4060 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4061 __ cmp(input, Operand(ip));
4062 final_branch_condition = eq;
4063
Steve Block44f0eee2011-05-26 01:26:41 +01004064 } else if (type_name->Equals(heap()->string_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004065 __ JumpIfSmi(input, false_label);
4066 __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
4067 __ b(ge, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004068 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
4069 __ tst(ip, Operand(1 << Map::kIsUndetectable));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004070 final_branch_condition = eq;
Ben Murdochb0fe1622011-05-05 13:52:32 +01004071
Steve Block44f0eee2011-05-26 01:26:41 +01004072 } else if (type_name->Equals(heap()->boolean_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004073 __ CompareRoot(input, Heap::kTrueValueRootIndex);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004074 __ b(eq, true_label);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004075 __ CompareRoot(input, Heap::kFalseValueRootIndex);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004076 final_branch_condition = eq;
4077
Steve Block44f0eee2011-05-26 01:26:41 +01004078 } else if (type_name->Equals(heap()->undefined_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004079 __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004080 __ b(eq, true_label);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004081 __ JumpIfSmi(input, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004082 // Check for undetectable objects => true.
4083 __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
4084 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
4085 __ tst(ip, Operand(1 << Map::kIsUndetectable));
4086 final_branch_condition = ne;
4087
Steve Block44f0eee2011-05-26 01:26:41 +01004088 } else if (type_name->Equals(heap()->function_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004089 __ JumpIfSmi(input, false_label);
4090 __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
4091 final_branch_condition = ge;
Ben Murdochb0fe1622011-05-05 13:52:32 +01004092
Steve Block44f0eee2011-05-26 01:26:41 +01004093 } else if (type_name->Equals(heap()->object_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004094 __ JumpIfSmi(input, false_label);
4095 __ CompareRoot(input, Heap::kNullValueRootIndex);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004096 __ b(eq, true_label);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004097 __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE);
4098 __ b(lo, false_label);
4099 __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE);
4100 __ b(hs, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004101 // Check for undetectable objects => false.
4102 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
4103 __ tst(ip, Operand(1 << Map::kIsUndetectable));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004104 final_branch_condition = eq;
Ben Murdochb0fe1622011-05-05 13:52:32 +01004105
4106 } else {
4107 final_branch_condition = ne;
4108 __ b(false_label);
4109 // A dead branch instruction will be generated after this point.
4110 }
4111
4112 return final_branch_condition;
4113}
4114
4115
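// Materializes a boolean telling whether the current function was invoked
// with 'new'. EmitIsConstructCall leaves the answer in the condition flags
// (eq when the calling frame is a construct frame).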
void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  EmitIsConstructCall(result, scratch0());
  __ b(eq, &true_label);

  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


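// Branching variant of DoIsConstructCall; branches on the eq condition
// set by EmitIsConstructCall.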
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp1, scratch0());
  EmitBranch(true_block, false_block, eq);
}


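// Walks one frame up from the current frame pointer, skipping an arguments
// adaptor frame if one is present, and compares that frame's marker word
// against StackFrame::CONSTRUCT. Only the condition flags carry the
// result; both temp registers are clobbered.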
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code is emitted for a lazy bailout instruction. It only captures
  // the environment after a call so that the safepoint data can be
  // populated with deoptimization data.
}


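// Unconditional deoptimization: 'al' is the always-taken ARM condition,
// so DeoptimizeIf always triggers the bailout.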
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}


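// Implements 'delete object[key]' via the DELETE builtin. The object, key,
// and strict-mode flag (as a smi) are passed on the stack, and a
// SafepointGenerator is supplied as the call wrapper so that safepoint and
// deoptimization data are recorded for the builtin call.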
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::DELETE, CALL_JS, &safepoint_generator);
}


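// Compares sp against the stack-limit root and calls the StackCheckStub
// when the limit has been crossed; the 'hs' branch skips the call while
// sp is still at or above the limit.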
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}


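// OSR (on-stack replacement) entry point: marks the position at which
// execution of a running unoptimized function can switch over into this
// optimized code.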
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}


#undef __

} }  // namespace v8::internal