blob: 800557084bbf2759268f8ed7a3ae5eb6cf5843c4 [file] [log] [blame]
Ben Murdochb8e0da22011-05-16 14:20:40 +01001// Copyright 2011 the V8 project authors. All rights reserved.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Steve Block44f0eee2011-05-26 01:26:41 +010028#include "v8.h"
29
Ben Murdochb0fe1622011-05-05 13:52:32 +010030#include "arm/lithium-codegen-arm.h"
Ben Murdoche0cee9b2011-05-25 10:26:03 +010031#include "arm/lithium-gap-resolver-arm.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "code-stubs.h"
33#include "stub-cache.h"
34
35namespace v8 {
36namespace internal {
37
38
// Helper passed to call sites that must record a safepoint.  As a
// CallWrapper it is invoked by the macro assembler immediately before
// and after the generated call instruction.
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  // Pads with nops so the call about to be emitted ends at least
  // Deoptimizer::patch_size() bytes after the previous safepoint.
  // This guarantees lazy-deopt patching of the previous safepoint
  // cannot overwrite this call's code.
  virtual void BeforeCall(int call_size) {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the generated code there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end =
        codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
    if (call_end < prev_jump_end) {
      int padding_size = prev_jump_end - call_end;
      // ARM instructions are fixed-width, so padding must be a whole
      // number of instructions.
      ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
      while (padding_size > 0) {
        codegen_->masm()->nop();
        padding_size -= Assembler::kInstrSize;
      }
    }
  }

  // Records the safepoint (pointer map + deopt index) at the pc right
  // after the call instruction.
  virtual void AfterCall() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;           // Owning code generator (not owned here).
  LPointerMap* pointers_;       // Pointer map describing live tagged values.
  int deoptimization_index_;    // Deopt entry associated with the call.
};
75
76
77#define __ masm()->
78
79bool LCodeGen::GenerateCode() {
80 HPhase phase("Code generation", chunk());
81 ASSERT(is_unused());
82 status_ = GENERATING;
83 CpuFeatures::Scope scope1(VFP3);
84 CpuFeatures::Scope scope2(ARMv7);
85 return GeneratePrologue() &&
86 GenerateBody() &&
87 GenerateDeferredCode() &&
88 GenerateSafepointTable();
89}
90
91
// Finalizes the generated Code object: stamps in the frame size and
// safepoint table location, attaches deoptimization data, and reserves
// relocation space needed when the code is later lazily deoptimized.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
99
100
101void LCodeGen::Abort(const char* format, ...) {
102 if (FLAG_trace_bailout) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100103 SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
104 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100105 va_list arguments;
106 va_start(arguments, format);
107 OS::VPrint(format, arguments);
108 va_end(arguments);
109 PrintF("\n");
110 }
111 status_ = ABORTED;
112}
113
114
115void LCodeGen::Comment(const char* format, ...) {
116 if (!FLAG_code_comments) return;
117 char buffer[4 * KB];
118 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
119 va_list arguments;
120 va_start(arguments, format);
121 builder.AddFormattedList(format, arguments);
122 va_end(arguments);
123
124 // Copy the string before recording it in the assembler to avoid
125 // issues when the stack allocated buffer goes out of scope.
126 size_t length = builder.position();
127 Vector<char> copy = Vector<char>::New(length + 1);
128 memcpy(copy.start(), builder.Finalize(), copy.length());
129 masm()->RecordComment(copy.start());
130}
131
132
// Emits the standard optimized-code frame setup: saves r1/cp/fp/lr,
// reserves spill slots, optionally allocates a heap context and copies
// context-allocated parameters into it, and emits trace-enter calls.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // Debug-only breakpoint: stop at the start of the function named by
  // --stop-at.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  // Push the standard frame: function, context, caller fp, return address.
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Fill the reserved slots with a recognizable zap value so reads
      // of uninitialized slots are easy to spot in the debugger.
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    // The allocation above can trigger GC, so record a safepoint with an
    // empty pointer map (no lazy deopt possible here).
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both r0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
211
212
213bool LCodeGen::GenerateBody() {
214 ASSERT(is_generating());
215 bool emit_instructions = true;
216 for (current_instruction_ = 0;
217 !is_aborted() && current_instruction_ < instructions_->length();
218 current_instruction_++) {
219 LInstruction* instr = instructions_->at(current_instruction_);
220 if (instr->IsLabel()) {
221 LLabel* label = LLabel::cast(instr);
222 emit_instructions = !label->HasReplacement();
223 }
224
225 if (emit_instructions) {
226 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
227 instr->CompileToNative(this);
228 }
229 }
230 return !is_aborted();
231}
232
233
234LInstruction* LCodeGen::GetNextInstruction() {
235 if (current_instruction_ < instructions_->length() - 1) {
236 return instructions_->at(current_instruction_ + 1);
237 } else {
238 return NULL;
239 }
240}
241
242
243bool LCodeGen::GenerateDeferredCode() {
244 ASSERT(is_generating());
245 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
246 LDeferredCode* code = deferred_[i];
247 __ bind(code->entry());
248 code->Generate();
249 __ jmp(code->exit());
250 }
251
Ben Murdochb8e0da22011-05-16 14:20:40 +0100252 // Force constant pool emission at the end of deferred code to make
253 // sure that no constant pools are emitted after the official end of
254 // the instruction sequence.
255 masm()->CheckConstPool(true, false);
256
Ben Murdochb0fe1622011-05-05 13:52:32 +0100257 // Deferred code is the last part of the instruction sequence. Mark
258 // the generated code as done unless we bailed out.
259 if (!is_aborted()) status_ = DONE;
260 return !is_aborted();
261}
262
263
// Emits the safepoint table after all code (the table needs the final
// frame size in stack slots to encode slot offsets).
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}
269
270
// Maps a register-allocator index to the concrete ARM core register.
Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}
274
275
// Maps a register-allocator index to the concrete VFP double register.
DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}
279
280
// Returns the core register assigned to a register-allocated operand.
// The operand must actually be a register (asserted).
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}
285
286
// Materializes an operand into a core register.  Register operands are
// returned as-is; constants and stack/argument slots are loaded into
// |scratch| (which is then returned).
Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}
300
301
// Returns the VFP double register assigned to a double-register operand.
DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}
306
307
// Materializes an operand into a VFP double register.  Double-register
// operands are returned directly; integer constants are converted via
// |flt_scratch| into |dbl_scratch|; stack slots are loaded into
// |dbl_scratch|.  Double and tagged immediates are not supported and
// abort compilation.
DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      // Move the int32 into ip, transfer to a single-precision register,
      // then convert signed int -> double.
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}
338
339
340int LCodeGen::ToInteger32(LConstantOperand* op) const {
341 Handle<Object> value = chunk_->LookupLiteral(op);
342 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
343 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
344 value->Number());
345 return static_cast<int32_t>(value->Number());
346}
347
348
// Converts an LOperand to an assembler Operand (immediate or register).
// Double immediates, double registers, and stack slots are not
// representable as Operands; doubles abort, slots are unreachable
// (callers must use ToMemOperand for slots).
Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    // Tagged constants become handle-relocated immediates.
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}
372
373
374MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100375 ASSERT(!op->IsRegister());
376 ASSERT(!op->IsDoubleRegister());
377 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
378 int index = op->index();
379 if (index >= 0) {
380 // Local or spill slot. Skip the frame pointer, function, and
381 // context in the fixed part of the frame.
382 return MemOperand(fp, -(index + 3) * kPointerSize);
383 } else {
384 // Incoming parameter. Skip the return address.
385 return MemOperand(fp, -(index - 1) * kPointerSize);
386 }
387}
388
389
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100390MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
391 ASSERT(op->IsDoubleStackSlot());
392 int index = op->index();
393 if (index >= 0) {
394 // Local or spill slot. Skip the frame pointer, function, context,
395 // and the first word of the double in the fixed part of the frame.
396 return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
397 } else {
398 // Incoming parameter. Skip the return address and the first word of
399 // the double.
400 return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
401 }
402}
403
404
// Serializes an environment chain into a deoptimization translation.
// Outer (caller) frames are written first via recursion, so the
// translation lists frames from outermost to innermost.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      // If the value lives in a register that also has a spill slot,
      // emit the spill slot first and mark the register copy as a
      // duplicate so the deoptimizer materializes it only once.
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
442
443
// Appends one deoptimization-translation command describing where the
// deoptimizer can find |op|'s value (stack slot, register, or literal)
// and whether it is a tagged pointer or a raw int32.
void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    // Arguments live above the spill area, so offset by the frame's
    // stack-slot count to get an absolute slot index.
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    // Constants are stored once in the literal array and referenced
    // by index.
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
482
483
// Convenience wrapper: calls a code object recording a simple safepoint
// (no registers saved).
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}
489
490
// Calls a code object on behalf of |instr|, recording the source
// position before the call and registering a lazy-deopt safepoint of
// the requested kind after it.
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode);
}
501
502
// Calls a runtime function on behalf of |instr|, recording the source
// position and a simple lazy-deopt safepoint.
void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}
514
515
// Calls a runtime function from a deferred code path where registers
// (including doubles) have been saved; records a register safepoint
// with no deoptimization possible.
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}
523
524
525void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
526 SafepointMode safepoint_mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100527 // Create the environment to bailout to. If the call has side effects
528 // execution has to continue after the call otherwise execution can continue
529 // from a previous bailout point repeating the call.
530 LEnvironment* deoptimization_environment;
531 if (instr->HasDeoptimizationEnvironment()) {
532 deoptimization_environment = instr->deoptimization_environment();
533 } else {
534 deoptimization_environment = instr->environment();
535 }
536
537 RegisterEnvironmentForDeoptimization(deoptimization_environment);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100538 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
539 RecordSafepoint(instr->pointer_map(),
540 deoptimization_environment->deoptimization_index());
541 } else {
542 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
543 RecordSafepointWithRegisters(
544 instr->pointer_map(),
545 0,
546 deoptimization_environment->deoptimization_index());
547 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100548}
549
550
551void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
552 if (!environment->HasBeenRegistered()) {
553 // Physical stack frame layout:
554 // -x ............. -4 0 ..................................... y
555 // [incoming arguments] [spill slots] [pushed outgoing arguments]
556
557 // Layout of the environment:
558 // 0 ..................................................... size-1
559 // [parameters] [locals] [expression stack including arguments]
560
561 // Layout of the translation:
562 // 0 ........................................................ size - 1 + 4
563 // [expression stack including arguments] [locals] [4 words] [parameters]
564 // |>------------ translation_size ------------<|
565
566 int frame_count = 0;
567 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
568 ++frame_count;
569 }
570 Translation translation(&translations_, frame_count);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100571 WriteTranslation(environment, &translation);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100572 int deoptimization_index = deoptimizations_.length();
573 environment->Register(deoptimization_index, translation.index());
574 deoptimizations_.Add(environment);
575 }
576}
577
578
// Emits a (possibly conditional) jump to the eager deoptimization entry
// for |environment|.  With cc == al the deopt is unconditional.
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  // Debug builds assert; release builds degrade to an abort so a missing
  // entry table never emits a jump to NULL.
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  // --deopt-every-n-times=1: force a deopt on the bailout whose id
  // matches the function's optimization count.
  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == al) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      // Trap (for debugging) only when the deopt condition holds, then
      // take the deopt jump unconditionally.
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}
613
614
// Builds the DeoptimizationInputData for the generated code: the
// translation byte array, the literal array, OSR info, and one entry
// (ast id, translation index, arguments height) per registered bailout.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}
646
647
648int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
649 int result = deoptimization_literals_.length();
650 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
651 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
652 }
653 deoptimization_literals_.Add(literal);
654 return result;
655}
656
657
658void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
659 ASSERT(deoptimization_literals_.length() == 0);
660
661 const ZoneList<Handle<JSFunction> >* inlined_closures =
662 chunk()->inlined_closures();
663
664 for (int i = 0, length = inlined_closures->length();
665 i < length;
666 i++) {
667 DefineDeoptimizationLiteral(inlined_closures->at(i));
668 }
669
670 inlined_function_count_ = deoptimization_literals_.length();
671}
672
673
// Core safepoint recorder: defines a safepoint of the given kind at the
// current pc and registers every live tagged value (stack slots always;
// registers only for register-saving safepoint kinds).
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}
697
698
// Records a simple safepoint (no saved registers, no arguments).
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}
703
704
// Records a simple safepoint with an empty pointer map — used at call
// sites where no tagged values are live.
void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}
709
710
// Records a safepoint for a call made with core registers saved.
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}
717
718
// Records a safepoint for a call made with both core and double
// registers saved.
void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
                  deoptimization_index);
}
726
727
Ben Murdochb0fe1622011-05-05 13:52:32 +0100728void LCodeGen::RecordPosition(int position) {
729 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
730 masm()->positions_recorder()->RecordPosition(position);
731}
732
733
// Binds the assembler label for a basic block, emits a block comment
// (loop headers are annotated specially), tracks the current block id,
// and compiles the label's attached gap moves.
void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}
744
745
// Emits code for a parallel move via the gap resolver, which orders the
// individual moves so sources are read before they are overwritten.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
749
750
751void LCodeGen::DoGap(LGap* gap) {
752 for (int i = LGap::FIRST_INNER_POSITION;
753 i <= LGap::LAST_INNER_POSITION;
754 i++) {
755 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
756 LParallelMove* move = gap->GetParallelMove(inner_pos);
757 if (move != NULL) DoParallelMove(move);
758 }
759
760 LInstruction* next = GetNextInstruction();
761 if (next != NULL && next->IsLazyBailout()) {
762 int pc = masm()->pc_offset();
763 safepoints_.SetPcAfterGap(pc);
764 }
765}
766
767
// Parameters already live in their frame slots on entry; no code needed.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
771
772
// Dispatches an LCallStub to the matching code stub and calls it.  All
// supported stubs return their result in r0.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // The stub expects its (tagged) argument in r0; it is on top of
      // the stack here.
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
817
818
// OSR values are already in their expected frame slots when entering
// via on-stack replacement; no code needed.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
822
823
// Emits code for integer modulus (left % right) with the result in the
// register that also holds the left operand.  Three strategies, fastest
// first: a mask for a constant power-of-two divisor, an unrolled subtract
// loop for small positive quotients, and a VFP-based computation of
// left - trunc(left / |right|) * |right| as the general fallback.
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    // The divisor's sign does not affect the result's magnitude.
    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    // Negative dividend: compute |dividend| & (divisor - 1), then negate
    // so the result carries the dividend's sign (C-style modulus).
    __ rsb(dividend, dividend, Operand(0));
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ rsb(dividend, dividend, Operand(0), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result from a negative dividend would be -0; deoptimize.
      __ b(ne, &done);
      DeoptimizeIf(al, instr->environment());
    } else {
      __ b(&done);
    }
    __ bind(&positive_dividend);
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(result.is(left));

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  __ b(lt, &vfp_modulo);

  // 0 <= left < right: the left hand side is already the result.
  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the right hand side is smaller than the (nonnegative)
  // left hand side, the left hand side is the result.
  // Else try a few subtractions of the left hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less or equal than the
    // the right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  // General case: compute the remainder with VFP arithmetic.
  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value is preloaded before. Be careful that 'right' is only live
  // on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    // Remainder is zero: deoptimize if the dividend was negative (-0 case).
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}
960
961
// Emits code for integer division (left / right).  Cheap checks handle
// divisors 1, 2 and 4 with exact shifts; anything else Smi-tags the
// operands and calls the generic binary-op stub via a deferred path,
// deoptimizing if the operands or result cannot be represented as Smis.
void LCodeGen::DoDivI(LDivI* instr) {
  // Deferred out-of-line path that calls the generic DIV stub.
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1): the quotient overflows int32.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.  The tst with predicated 'eq'
  // additionally requires the low bits to be clear so the shift is exact.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged to Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}
1040
1041
// Shared deferred-code helper: shuffles the two operands into the stub's
// fixed calling convention (left in r1, right in r0), calls the
// TypeRecordingBinaryOpStub for 'op', and stores the stub's r0 result back
// into the safepoint register snapshot so it survives register restoration.
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  // Registers (and doubles) are saved for the duration of the stub call.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  // The cases are ordered to avoid clobbering a value before it is read.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  TypeRecordingBinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
1071
1072
// Emits code for integer multiplication (left * right), result in 'left'.
// Uses smull with a high-word check when overflow must be detected, and
// deoptimizes on a -0 result when required.
void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->InputAt(0));
  Register right = EmitLoadRegister(instr->InputAt(1), scratch);

  // For the -0 check below we need the operands' signs after 'left' has
  // been clobbered by the product, so save left|right into a temp first.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->InputAt(1)->IsConstantOperand()) {
    __ orr(ToRegister(instr->TempAt(0)), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(left, scratch, left, right);
    // The product fits in 32 bits iff the high word equals the sign
    // extension of the low word.
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ cmp(left, Operand(0));
    __ b(ne, &done);
    if (instr->InputAt(1)->IsConstantOperand()) {
      // 0 * c is -0 when c < 0; the c == 0 case cannot prove the sign of
      // the other operand statically, so deoptimize for c <= 0.
      if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
        DeoptimizeIf(al, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}
1110
1111
1112void LCodeGen::DoBitI(LBitI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001113 LOperand* left = instr->InputAt(0);
1114 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001115 ASSERT(left->Equals(instr->result()));
1116 ASSERT(left->IsRegister());
1117 Register result = ToRegister(left);
Steve Block44f0eee2011-05-26 01:26:41 +01001118 Operand right_operand(no_reg);
1119
1120 if (right->IsStackSlot() || right->IsArgument()) {
1121 Register right_reg = EmitLoadRegister(right, ip);
1122 right_operand = Operand(right_reg);
1123 } else {
1124 ASSERT(right->IsRegister() || right->IsConstantOperand());
1125 right_operand = ToOperand(right);
1126 }
1127
Ben Murdochb0fe1622011-05-05 13:52:32 +01001128 switch (instr->op()) {
1129 case Token::BIT_AND:
Steve Block44f0eee2011-05-26 01:26:41 +01001130 __ and_(result, ToRegister(left), right_operand);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001131 break;
1132 case Token::BIT_OR:
Steve Block44f0eee2011-05-26 01:26:41 +01001133 __ orr(result, ToRegister(left), right_operand);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001134 break;
1135 case Token::BIT_XOR:
Steve Block44f0eee2011-05-26 01:26:41 +01001136 __ eor(result, ToRegister(left), right_operand);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001137 break;
1138 default:
1139 UNREACHABLE();
1140 break;
1141 }
1142}
1143
1144
// Emits code for SAR/SHR/SHL.  A register shift amount is masked to five
// bits first; a constant amount is folded at compile time.  SHR can
// deoptimize when the result would not fit a signed int32.
void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(scratch, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          // A logical shift producing a value with the sign bit set
          // cannot be represented as a signed int32; deoptimize.
          __ mov(result, Operand(result, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Constant shift amount, folded at compile time.
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          // x >>> 0 is a no-op but must still deoptimize if the value is
          // negative, since the unsigned result would exceed kMaxInt.
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1202
1203
1204void LCodeGen::DoSubI(LSubI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01001205 LOperand* left = instr->InputAt(0);
1206 LOperand* right = instr->InputAt(1);
1207 ASSERT(left->Equals(instr->result()));
1208 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1209 SBit set_cond = can_overflow ? SetCC : LeaveCC;
1210
1211 if (right->IsStackSlot() || right->IsArgument()) {
1212 Register right_reg = EmitLoadRegister(right, ip);
1213 __ sub(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1214 } else {
1215 ASSERT(right->IsRegister() || right->IsConstantOperand());
1216 __ sub(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1217 }
1218
1219 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001220 DeoptimizeIf(vs, instr->environment());
1221 }
1222}
1223
1224
1225void LCodeGen::DoConstantI(LConstantI* instr) {
1226 ASSERT(instr->result()->IsRegister());
1227 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1228}
1229
1230
1231void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001232 ASSERT(instr->result()->IsDoubleRegister());
1233 DwVfpRegister result = ToDoubleRegister(instr->result());
1234 double v = instr->value();
1235 __ vmov(result, v);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001236}
1237
1238
1239void LCodeGen::DoConstantT(LConstantT* instr) {
1240 ASSERT(instr->result()->IsRegister());
1241 __ mov(ToRegister(instr->result()), Operand(instr->value()));
1242}
1243
1244
Steve Block9fac8402011-05-12 15:51:54 +01001245void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001246 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001247 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001248 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
1249}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001250
Ben Murdochb0fe1622011-05-05 13:52:32 +01001251
Steve Block44f0eee2011-05-26 01:26:41 +01001252void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001253 Register result = ToRegister(instr->result());
1254 Register array = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01001255 __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01001256}
1257
1258
Steve Block9fac8402011-05-12 15:51:54 +01001259void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1260 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001261 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001262 __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001263}
1264
1265
// Implements %_ValueOf: for a JSValue wrapper object, load the wrapped
// primitive; Smis and non-wrapper objects pass through unchanged.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  Label done;

  // If the object is a smi return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ b(ne, &done);
  // JSValue wrapper: unwrap the primitive it holds.
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1284
1285
1286void LCodeGen::DoBitNotI(LBitNotI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001287 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001288 ASSERT(input->Equals(instr->result()));
1289 __ mvn(ToRegister(input), Operand(ToRegister(input)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001290}
1291
1292
1293void LCodeGen::DoThrow(LThrow* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001294 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001295 __ push(input_reg);
1296 CallRuntime(Runtime::kThrow, 1, instr);
1297
1298 if (FLAG_debug_code) {
1299 __ stop("Unreachable code.");
1300 }
1301}
1302
1303
1304void LCodeGen::DoAddI(LAddI* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001305 LOperand* left = instr->InputAt(0);
1306 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001307 ASSERT(left->Equals(instr->result()));
Steve Block44f0eee2011-05-26 01:26:41 +01001308 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1309 SBit set_cond = can_overflow ? SetCC : LeaveCC;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001310
Steve Block44f0eee2011-05-26 01:26:41 +01001311 if (right->IsStackSlot() || right->IsArgument()) {
1312 Register right_reg = EmitLoadRegister(right, ip);
1313 __ add(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
1314 } else {
1315 ASSERT(right->IsRegister() || right->IsConstantOperand());
1316 __ add(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
1317 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001318
Steve Block44f0eee2011-05-26 01:26:41 +01001319 if (can_overflow) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001320 DeoptimizeIf(vs, instr->environment());
1321 }
1322}
1323
1324
// Emits double-precision arithmetic on two VFP registers, in place in
// 'left'.  MOD has no VFP instruction, so it calls out to the C runtime
// using the soft-float calling convention (operands in r0-r3).
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      // The C call below clobbers the argument registers.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      __ PrepareCallCFunction(4, scratch0());
      // Pass the two doubles in r0:r1 and r2:r3 (soft-float ABI).
      __ vmov(r0, r1, left);
      __ vmov(r2, r3, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 4);
      // Move the result in the double result register.
      __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
1362
1363
// Emits a call to the generic (tagged) binary-op stub.  The register
// allocator has pinned the operands to the stub's calling convention:
// left in r1, right in r0; the stub leaves its result in r0.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
1372
1373
1374int LCodeGen::GetNextEmittedBlock(int block) {
1375 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1376 LLabel* label = chunk_->GetLabel(i);
1377 if (!label->HasReplacement()) return i;
1378 }
1379 return -1;
1380}
1381
1382
// Emits a conditional two-way branch, minimizing jumps by falling through
// to whichever destination is the next block emitted in the code stream.
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    // Both edges lead to the same block: an unconditional goto suffices.
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // Fall through to the left block; branch on the negated condition.
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // Fall through to the right block.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    // Neither target follows: need both a conditional and an
    // unconditional branch.
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}
1399
1400
// Emits a branch on the JavaScript truthiness of the input value,
// specialized on the value's representation: int32 (non-zero is true),
// double (zero and NaN are false), or tagged.  For generic tagged values
// the well-known singletons and Smis are tested inline, heap numbers are
// compared against 0.0/NaN, and anything else goes through ToBooleanStub.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    EmitBranch(true_block, false_block, ne);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      // Known boolean: only the 'true' singleton is truthy.
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      EmitBranch(true_block, false_block, eq);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      // Inline tests for the common singletons and Smis.
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, true_label);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(reg, ip);
      __ b(eq, false_label);
      // Smi zero (tagged value 0) is false; any other Smi is true.
      __ cmp(reg, Operand(0));
      __ b(eq, false_label);
      __ tst(reg, Operand(kSmiTagMask));
      __ b(eq, true_label);

      // Test double values. Zero and NaN are false.
      Label call_stub;
      DoubleRegister dbl_scratch = d0;
      Register scratch = scratch0();
      __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(scratch, Operand(ip));
      __ b(ne, &call_stub);
      __ sub(ip, reg, Operand(kHeapObjectTag));
      __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
      __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
      __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
      __ b(ne, false_label);
      __ b(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub(reg);
      RegList saved_regs = kJSCallerSaved | kCalleeSaved;
      __ stm(db_w, sp, saved_regs);
      __ CallStub(&stub);
      __ cmp(reg, Operand(0));
      __ ldm(ia_w, sp, saved_regs);
      EmitBranch(true_block, false_block, ne);
    }
  }
}
1471
1472
// Emits an unconditional jump to 'block', eliding it when the target is
// the next block emitted.  When a deferred stack check is supplied, the
// jump is routed through a stack-limit test that can divert to the
// deferred (interrupt) code.
void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      // Stack pointer above the limit: no interrupt pending, jump directly.
      __ b(hs, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      // The deferred code returns to the target block after the check.
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}
1489
1490
// Deferred slow path for DoGoto's stack check: saves the registers,
// calls the StackGuard runtime function (which services interrupts),
// and restores the registers when the scope exits.
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}
1495
1496
// Emits an unconditional goto.  Back edges carry a stack check so long
// running loops can be interrupted; the check's slow path is emitted as
// deferred code.
void LCodeGen::DoGoto(LGoto* instr) {
  // Deferred out-of-line path that calls the stack guard runtime.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}
1513
1514
1515Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Steve Block1e0659c2011-05-24 12:43:12 +01001516 Condition cond = kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001517 switch (op) {
1518 case Token::EQ:
1519 case Token::EQ_STRICT:
1520 cond = eq;
1521 break;
1522 case Token::LT:
1523 cond = is_unsigned ? lo : lt;
1524 break;
1525 case Token::GT:
1526 cond = is_unsigned ? hi : gt;
1527 break;
1528 case Token::LTE:
1529 cond = is_unsigned ? ls : le;
1530 break;
1531 case Token::GTE:
1532 cond = is_unsigned ? hs : ge;
1533 break;
1534 case Token::IN:
1535 case Token::INSTANCEOF:
1536 default:
1537 UNREACHABLE();
1538 }
1539 return cond;
1540}
1541
1542
// Emits an integer compare of two register operands, setting the
// condition flags for a subsequent conditional branch or move.
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}
1546
1547
// Materializes the boolean result of an integer or double comparison:
// loads 'true' when the condition holds, 'false' otherwise.  For doubles,
// an unordered result (NaN operand) yields 'false'.
void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  Register scratch = scratch0();

  Label unordered, done;
  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to unordered to return false.
    __ b(vs, &unordered);
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ b(cc, &done);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}
1574
1575
// Branch form of DoCmpID: compares integers or doubles and branches
// directly to the true/false blocks.  An unordered double comparison
// (NaN operand) goes to the false block.
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to false block label.
    __ b(vs, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}
1596
1597
1598void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001599 Register left = ToRegister(instr->InputAt(0));
1600 Register right = ToRegister(instr->InputAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001601 Register result = ToRegister(instr->result());
1602
1603 __ cmp(left, Operand(right));
1604 __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1605 __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001606}
1607
1608
1609void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001610 Register left = ToRegister(instr->InputAt(0));
1611 Register right = ToRegister(instr->InputAt(1));
1612 int false_block = chunk_->LookupDestination(instr->false_block_id());
1613 int true_block = chunk_->LookupDestination(instr->true_block_id());
1614
1615 __ cmp(left, Operand(right));
1616 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001617}
1618
1619
// Materializes the boolean result of a null comparison.  Strict mode
// ('=== null') only matches the null singleton; sloppy mode ('== null')
// also matches undefined and undetectable objects (e.g. document.all).
void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    // Strict equality: only null itself qualifies.
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    // undefined == null in sloppy comparison.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    // Smis are never == null.
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}
1652
1653
// Branching variant of DoIsNull: same null/undefined/undetectable logic,
// but jumps straight to the true/false blocks instead of producing a value.
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    // Strict: only null itself matches.
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    // Non-strict: undefined also compares equal to null.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    // Smis are neither null, undefined, nor undetectable.
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}
1685
1686
// Tests whether |input| is a JS object. Jumps to |is_not_object| or
// |is_object| for the cases decided early; otherwise falls through with the
// instance type compared against LAST_JS_OBJECT_TYPE and returns the
// condition (le) under which the value is an object. Writes temp1 and
// temp2; |input| itself is only read.
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  // Smis are not objects.
  __ JumpIfSmi(input, is_not_object);

  // null counts as an object for this test.
  __ LoadRoot(temp1, Heap::kNullValueRootIndex);
  __ cmp(input, temp1);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_JS_OBJECT_TYPE));
  // Caller branches/loads on the returned condition.
  return le;
}
1712
1713
1714void LCodeGen::DoIsObject(LIsObject* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001715 Register reg = ToRegister(instr->InputAt(0));
1716 Register result = ToRegister(instr->result());
1717 Register temp = scratch0();
1718 Label is_false, is_true, done;
1719
1720 Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
1721 __ b(true_cond, &is_true);
1722
1723 __ bind(&is_false);
1724 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1725 __ b(&done);
1726
1727 __ bind(&is_true);
1728 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1729
1730 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001731}
1732
1733
1734void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001735 Register reg = ToRegister(instr->InputAt(0));
1736 Register temp1 = ToRegister(instr->TempAt(0));
1737 Register temp2 = scratch0();
1738
1739 int true_block = chunk_->LookupDestination(instr->true_block_id());
1740 int false_block = chunk_->LookupDestination(instr->false_block_id());
1741 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1742 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1743
1744 Condition true_cond =
1745 EmitIsObject(reg, temp1, temp2, false_label, true_label);
1746
1747 EmitBranch(true_block, false_block, true_cond);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001748}
1749
1750
1751void LCodeGen::DoIsSmi(LIsSmi* instr) {
1752 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1753 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01001754 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001755 __ tst(input_reg, Operand(kSmiTagMask));
1756 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1757 Label done;
1758 __ b(eq, &done);
1759 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1760 __ bind(&done);
1761}
1762
1763
1764void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1765 int true_block = chunk_->LookupDestination(instr->true_block_id());
1766 int false_block = chunk_->LookupDestination(instr->false_block_id());
1767
Steve Block1e0659c2011-05-24 12:43:12 +01001768 Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001769 __ tst(input_reg, Operand(kSmiTagMask));
1770 EmitBranch(true_block, false_block, eq);
1771}
1772
1773
Steve Block1e0659c2011-05-24 12:43:12 +01001774static InstanceType TestType(HHasInstanceType* instr) {
1775 InstanceType from = instr->from();
1776 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001777 if (from == FIRST_TYPE) return to;
1778 ASSERT(from == to || to == LAST_TYPE);
1779 return from;
1780}
1781
1782
Steve Block1e0659c2011-05-24 12:43:12 +01001783static Condition BranchCondition(HHasInstanceType* instr) {
1784 InstanceType from = instr->from();
1785 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001786 if (from == to) return eq;
1787 if (to == LAST_TYPE) return hs;
1788 if (from == FIRST_TYPE) return ls;
1789 UNREACHABLE();
1790 return eq;
1791}
1792
1793
// Materializes true/false depending on whether the input's instance type
// lies in the range encoded by the hydrogen instruction.
void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Label done;
  // A smi never matches: load false and skip the type check.
  __ tst(input, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kFalseValueRootIndex, eq);
  __ b(eq, &done);
  // result doubles as map/type scratch here; it is overwritten below anyway.
  __ CompareObjectType(input, result, result, TestType(instr->hydrogen()));
  Condition cond = BranchCondition(instr->hydrogen());
  // Exactly one of the two conditional loads executes.
  __ LoadRoot(result, Heap::kTrueValueRootIndex, cond);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, NegateCondition(cond));
  __ bind(&done);
}
1809
1810
1811void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01001812 Register scratch = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001813 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001814
1815 int true_block = chunk_->LookupDestination(instr->true_block_id());
1816 int false_block = chunk_->LookupDestination(instr->false_block_id());
1817
1818 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1819
1820 __ tst(input, Operand(kSmiTagMask));
1821 __ b(eq, false_label);
1822
Steve Block1e0659c2011-05-24 12:43:12 +01001823 __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
1824 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001825}
1826
1827
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001828void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1829 Register input = ToRegister(instr->InputAt(0));
1830 Register result = ToRegister(instr->result());
1831
1832 if (FLAG_debug_code) {
1833 __ AbortIfNotString(input);
1834 }
1835
1836 __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
1837 __ IndexFromHash(result, result);
1838}
1839
1840
Ben Murdochb0fe1622011-05-05 13:52:32 +01001841void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001842 Register input = ToRegister(instr->InputAt(0));
1843 Register result = ToRegister(instr->result());
1844 Register scratch = scratch0();
1845
1846 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1847 __ ldr(scratch,
1848 FieldMemOperand(input, String::kHashFieldOffset));
1849 __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
1850 __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1851 __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001852}
1853
1854
1855void LCodeGen::DoHasCachedArrayIndexAndBranch(
1856 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001857 Register input = ToRegister(instr->InputAt(0));
1858 Register scratch = scratch0();
1859
1860 int true_block = chunk_->LookupDestination(instr->true_block_id());
1861 int false_block = chunk_->LookupDestination(instr->false_block_id());
1862
1863 __ ldr(scratch,
1864 FieldMemOperand(input, String::kHashFieldOffset));
1865 __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
1866 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001867}
1868
1869
// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
// On fall-through, eq means the object's class name equals |class_name|.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String>class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  // Smis have no class: answer false.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, is_false);
  // Values below FIRST_JS_OBJECT_TYPE have no class either.
  __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ b(eq, is_true);
  } else {
    __ b(eq, is_false);
  }

  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}
1925
1926
1927void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001928 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001929 Register result = ToRegister(instr->result());
1930 ASSERT(input.is(result));
1931 Handle<String> class_name = instr->hydrogen()->class_name();
1932
1933 Label done, is_true, is_false;
1934
1935 EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
1936 __ b(ne, &is_false);
1937
1938 __ bind(&is_true);
1939 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1940 __ jmp(&done);
1941
1942 __ bind(&is_false);
1943 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1944 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001945}
1946
1947
1948void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001949 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001950 Register temp = scratch0();
Steve Block1e0659c2011-05-24 12:43:12 +01001951 Register temp2 = ToRegister(instr->TempAt(0));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001952 Handle<String> class_name = instr->hydrogen()->class_name();
1953
1954 int true_block = chunk_->LookupDestination(instr->true_block_id());
1955 int false_block = chunk_->LookupDestination(instr->false_block_id());
1956
1957 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1958 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1959
1960 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1961
1962 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001963}
1964
1965
1966void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001967 Register reg = ToRegister(instr->InputAt(0));
1968 Register temp = ToRegister(instr->TempAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001969 int true_block = instr->true_block_id();
1970 int false_block = instr->false_block_id();
1971
1972 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
1973 __ cmp(temp, Operand(instr->map()));
1974 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001975}
1976
1977
// Implements the generic instanceof operator by calling the InstanceofStub
// and materializing the answer as a true/false object in r0. The stub's
// result is compared against zero; zero selects the true value.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ cmp(r0, Operand(0));
  // Exactly one of the two conditional moves executes; LeaveCC preserves
  // the flags so both see the same comparison result.
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
1989
1990
1991void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001992 ASSERT(ToRegister(instr->InputAt(0)).is(r0)); // Object is in r0.
1993 ASSERT(ToRegister(instr->InputAt(1)).is(r1)); // Function is in r1.
1994
1995 int true_block = chunk_->LookupDestination(instr->true_block_id());
1996 int false_block = chunk_->LookupDestination(instr->false_block_id());
1997
1998 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1999 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Steve Block44f0eee2011-05-26 01:26:41 +01002000 __ cmp(r0, Operand(0));
Steve Block1e0659c2011-05-24 12:43:12 +01002001 EmitBranch(true_block, false_block, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002002}
2003
2004
// Implements "object instanceof known-global-function" with an inlined,
// patchable map/result cache at the call site. On a cache hit the patched
// hole values below have been rewritten to the cached map and the cached
// true/false result; on a miss the deferred code calls the InstanceofStub,
// which patches the cache.
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  // Deferred slow path; map_check_ marks the patch site inside the fast path.
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A Smi is not instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurences of the
  // hole value will be patched to the last map/result pair generated by the
  // instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  __ mov(ip, Operand(factory()->the_hole_value()));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(factory()->the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values is not instance of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}
2078
2079
// Slow path for DoInstanceOfKnownGlobal: calls the InstanceofStub with
// flags that make it perform the inline call-site check at |map_check| and
// return a true/false object directly in r0.
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  // Configure the stub: args already in registers, inline call-site check,
  // true/false object (not 0/1) as the result.
  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be r4 as
  // its slot of the pushing of safepoint registers is used to communicate the
  // offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  // NOTE(review): kAdditionalDelta presumably covers the instructions
  // emitted between before_push_delta and the stub call — confirm against
  // the stub's patching code if this sequence changes.
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  // Keep the constant pool from being emitted inside this fixed-size
  // sequence so that |delta| stays an accurate instruction count.
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}
2117
Ben Murdochb0fe1622011-05-05 13:52:32 +01002118
2119static Condition ComputeCompareCondition(Token::Value op) {
2120 switch (op) {
2121 case Token::EQ_STRICT:
2122 case Token::EQ:
2123 return eq;
2124 case Token::LT:
2125 return lt;
2126 case Token::GT:
2127 return gt;
2128 case Token::LTE:
2129 return le;
2130 case Token::GTE:
2131 return ge;
2132 default:
2133 UNREACHABLE();
Steve Block1e0659c2011-05-24 12:43:12 +01002134 return kNoCondition;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002135 }
2136}
2137
2138
// Generic (tagged) comparison: calls the compare IC, then materializes the
// boolean answer from the condition flags into the result register.
void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  // The compare stub expects the condition and the input operands
  // reversed for GT and LTE (see DoCmpTAndBranch).
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  // Exactly one of the two conditional loads executes.
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}
2157
2158
2159void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002160 Token::Value op = instr->op();
2161 int true_block = chunk_->LookupDestination(instr->true_block_id());
2162 int false_block = chunk_->LookupDestination(instr->false_block_id());
2163
2164 Handle<Code> ic = CompareIC::GetUninitialized(op);
2165 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2166
2167 // The compare stub expects compare condition and the input operands
2168 // reversed for GT and LTE.
2169 Condition condition = ComputeCompareCondition(op);
2170 if (op == Token::GT || op == Token::LTE) {
2171 condition = ReverseCondition(condition);
2172 }
2173 __ cmp(r0, Operand(0));
2174 EmitBranch(true_block, false_block, condition);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002175}
2176
2177
2178void LCodeGen::DoReturn(LReturn* instr) {
2179 if (FLAG_trace) {
2180 // Push the return value on the stack as the parameter.
2181 // Runtime::TraceExit returns its parameter in r0.
2182 __ push(r0);
2183 __ CallRuntime(Runtime::kTraceExit, 1);
2184 }
Steve Block053d10c2011-06-13 19:13:29 +01002185 int32_t sp_delta = (ParameterCount() + 1) * kPointerSize;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002186 __ mov(sp, fp);
2187 __ ldm(ia_w, sp, fp.bit() | lr.bit());
2188 __ add(sp, sp, Operand(sp_delta));
2189 __ Jump(lr);
2190}
2191
2192
Ben Murdoch8b112d22011-06-08 16:22:53 +01002193void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002194 Register result = ToRegister(instr->result());
2195 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
2196 __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
2197 if (instr->hydrogen()->check_hole_value()) {
2198 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2199 __ cmp(result, ip);
2200 DeoptimizeIf(eq, instr->environment());
2201 }
2202}
2203
2204
Ben Murdoch8b112d22011-06-08 16:22:53 +01002205void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2206 ASSERT(ToRegister(instr->global_object()).is(r0));
2207 ASSERT(ToRegister(instr->result()).is(r0));
2208
2209 __ mov(r2, Operand(instr->name()));
2210 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2211 : RelocInfo::CODE_TARGET_CONTEXT;
2212 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2213 CallCode(ic, mode, instr);
2214}
2215
2216
// Stores a value into a global property cell, deoptimizing first if the
// cell holds the hole and a hole check was requested.
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
2240
2241
Ben Murdoch8b112d22011-06-08 16:22:53 +01002242void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2243 ASSERT(ToRegister(instr->global_object()).is(r1));
2244 ASSERT(ToRegister(instr->value()).is(r0));
2245
2246 __ mov(r2, Operand(instr->name()));
2247 Handle<Code> ic = instr->strict_mode()
2248 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2249 : isolate()->builtins()->StoreIC_Initialize();
2250 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2251}
2252
2253
Ben Murdochb8e0da22011-05-16 14:20:40 +01002254void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002255 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002256 Register result = ToRegister(instr->result());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002257 __ ldr(result, ContextOperand(context, instr->slot_index()));
Ben Murdochb8e0da22011-05-16 14:20:40 +01002258}
2259
2260
Steve Block1e0659c2011-05-24 12:43:12 +01002261void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2262 Register context = ToRegister(instr->context());
2263 Register value = ToRegister(instr->value());
Steve Block1e0659c2011-05-24 12:43:12 +01002264 __ str(value, ContextOperand(context, instr->slot_index()));
2265 if (instr->needs_write_barrier()) {
2266 int offset = Context::SlotOffset(instr->slot_index());
2267 __ RecordWrite(context, Operand(offset), value, scratch0());
2268 }
2269}
2270
2271
Ben Murdochb0fe1622011-05-05 13:52:32 +01002272void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002273 Register object = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01002274 Register result = ToRegister(instr->result());
2275 if (instr->hydrogen()->is_in_object()) {
2276 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
2277 } else {
2278 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
2279 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
2280 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002281}
2282
2283
// Loads the value of the field named |name| from |object| into |result|,
// using the field layout described by |type| (the object's map). |name|
// must resolve to a FIELD property in the map's descriptors.
void LCodeGen::EmitLoadField(Register result,
                             Register object,
                             Handle<Map> type,
                             Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
  int index = lookup.GetLocalFieldIndexFromMap(*type);
  int offset = index * kPointerSize;
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
  } else {
    // Non-negative property indices are in the properties array.
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
  }
}
2303
2304
// Loads a named field when the receiver may have one of several maps.
// Compares the receiver's map against each collected map and, on a match,
// does a direct field load for that layout. The final map either falls
// back to the generic LoadIC or deoptimizes on a mismatch.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps were collected: only the generic IC can handle the load.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    // Try every map except the last.
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadField(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    // Last map: generic fallback or deoptimization on mismatch.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadField(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment());
      EmitLoadField(result, object, map, name);
    }
    __ bind(&done);
  }
}
2346
2347
Ben Murdochb0fe1622011-05-05 13:52:32 +01002348void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2349 ASSERT(ToRegister(instr->object()).is(r0));
2350 ASSERT(ToRegister(instr->result()).is(r0));
2351
2352 // Name is always in r2.
2353 __ mov(r2, Operand(instr->name()));
Steve Block44f0eee2011-05-26 01:26:41 +01002354 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002355 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2356}
2357
2358
// Loads the prototype that instances of |function| would get. Deoptimizes
// if the value is not a function or if its prototype slot holds the hole.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  // The hole here means neither is present: deoptimize.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2401
2402
// Loads a JSObject's elements backing store into the result register.
// In debug builds, additionally verifies that the elements are a
// FixedArray, a copy-on-write FixedArray, or an external array.
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    // Neither fixed-array map matched: require an external array by
    // range-checking the instance type.
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ sub(scratch, scratch, Operand(FIRST_EXTERNAL_ARRAY_TYPE));
    __ cmp(scratch, Operand(kExternalArrayTypeCount));
    // cc (carry clear) is ARM's unsigned-lower condition, so this checks
    // that the biased instance type lies inside the external-array range.
    __ Check(cc, "Check for fast elements failed.");
    __ bind(&done);
  }
}
2426
2427
Steve Block44f0eee2011-05-26 01:26:41 +01002428void LCodeGen::DoLoadExternalArrayPointer(
2429 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002430 Register to_reg = ToRegister(instr->result());
2431 Register from_reg = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002432 __ ldr(to_reg, FieldMemOperand(from_reg,
2433 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002434}
2435
2436
// Loads one argument from the arguments area located by DoArgumentsElements.
// Deoptimizes when index is out of range; clobbers the length register in
// the process.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bailout index is not a valid argument index. Use unsigned check to get
  // negative check for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them add one more.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}
2453
2454
// Loads an element from a fast-elements FixedArray; deoptimizes when the
// loaded value is the hole (the element is absent).
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  // The register allocator has result reuse the elements register.
  ASSERT(result.is(elements));

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ cmp(result, scratch);
  DeoptimizeIf(eq, instr->environment());
}
2471
2472
// Loads an element from an external (typed) array. Float32 elements are
// widened to double; unsigned int32 values deoptimize when they do not
// fit in a signed 32-bit integer.
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  ExternalArrayType array_type = instr->array_type();
  if (array_type == kExternalFloatArray) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result(ToDoubleRegister(instr->result()));
    // Load the 32-bit float and widen it to a double.
    __ add(scratch0(), external_pointer, Operand(key, LSL, 2));
    __ vldr(result.low(), scratch0(), 0);
    __ vcvt_f64_f32(result, result.low());
  } else {
    Register result(ToRegister(instr->result()));
    // Scale the key by the element size and use a sign/zero extending
    // load matching the element type.
    switch (array_type) {
      case kExternalByteArray:
        __ ldrsb(result, MemOperand(external_pointer, key));
        break;
      case kExternalUnsignedByteArray:
      case kExternalPixelArray:
        __ ldrb(result, MemOperand(external_pointer, key));
        break;
      case kExternalShortArray:
        __ ldrsh(result, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalUnsignedShortArray:
        __ ldrh(result, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalIntArray:
        __ ldr(result, MemOperand(external_pointer, key, LSL, 2));
        break;
      case kExternalUnsignedIntArray:
        __ ldr(result, MemOperand(external_pointer, key, LSL, 2));
        // Values >= 2^31 cannot be represented as a signed int32 result.
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case kExternalFloatArray:
        UNREACHABLE();
        break;
    }
  }
}
2517
2518
// Emits a generic keyed load through the KeyedLoadIC stub.
// Calling convention: receiver in r1, key in r0; the IC leaves the
// result in r0.
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
2526
2527
2528void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002529 Register scratch = scratch0();
2530 Register result = ToRegister(instr->result());
2531
2532 // Check if the calling frame is an arguments adaptor frame.
2533 Label done, adapted;
2534 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2535 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
2536 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2537
2538 // Result is the frame pointer for the frame if not adapted and for the real
2539 // frame below the adaptor frame if adapted.
2540 __ mov(result, fp, LeaveCC, ne);
2541 __ mov(result, scratch, LeaveCC, eq);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002542}
2543
2544
2545void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002546 Register elem = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002547 Register result = ToRegister(instr->result());
2548
2549 Label done;
2550
2551 // If no arguments adaptor frame the number of arguments is fixed.
2552 __ cmp(fp, elem);
2553 __ mov(result, Operand(scope()->num_parameters()));
2554 __ b(eq, &done);
2555
2556 // Arguments adaptor frame present. Get argument length from there.
2557 __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2558 __ ldr(result,
2559 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
2560 __ SmiUntag(result);
2561
2562 // Argument length is in result register.
2563 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002564}
2565
2566
// Implements Function.prototype.apply: substitutes the global object for
// a null/undefined receiver, deoptimizes on a non-object receiver or when
// more than kArgumentsLimit arguments are passed, copies the arguments
// from the elements area onto the stack, and invokes the function.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  Label global_object, receiver_ok;
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object.
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
  DeoptimizeIf(lo, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack. Arguments are pushed from the highest index downwards.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
  // Restore the context register after the call.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2638
2639
2640void LCodeGen::DoPushArgument(LPushArgument* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002641 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002642 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
2643 Abort("DoPushArgument not implemented for double type.");
2644 } else {
2645 Register argument_reg = EmitLoadRegister(argument, ip);
2646 __ push(argument_reg);
2647 }
2648}
2649
2650
Steve Block1e0659c2011-05-24 12:43:12 +01002651void LCodeGen::DoContext(LContext* instr) {
2652 Register result = ToRegister(instr->result());
2653 __ mov(result, cp);
2654}
2655
2656
2657void LCodeGen::DoOuterContext(LOuterContext* instr) {
2658 Register context = ToRegister(instr->context());
2659 Register result = ToRegister(instr->result());
2660 __ ldr(result,
2661 MemOperand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2662 __ ldr(result, FieldMemOperand(result, JSFunction::kContextOffset));
2663}
2664
2665
Ben Murdochb0fe1622011-05-05 13:52:32 +01002666void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002667 Register context = ToRegister(instr->context());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002668 Register result = ToRegister(instr->result());
2669 __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
2670}
2671
2672
2673void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002674 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002675 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002676 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002677}
2678
2679
// Invokes a JSFunction known at compile time with |arity| arguments.
// Expects the function object in r1; may clobber r0 with the argument
// count. Switches contexts when needed, records the lazy-deopt safepoint,
// and restores cp afterwards.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to arguments count if adaption is not needed. Assumes that r0
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function through its code entry.
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
2711
2712
// Calls a function known at compile time: materializes it in r1 (as
// CallKnownFunction expects) and dispatches through CallKnownFunction.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->function()));
  CallKnownFunction(instr->function(), instr->arity(), instr);
}
2718
2719
// Deferred (slow) path of Math.abs for a tagged, non-smi input: the value
// must be a heap number (else deoptimize). Positive numbers are returned
// as-is; negative numbers get a freshly allocated heap number with the
// sign bit cleared, written into the input's safepoint register slot.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  // Reuse scratch0() under a new name; invalidate the old alias.
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| would be restored
  // unchanged by popping safepoint registers.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    // Clear the sign bit in the exponent word and copy the mantissa over.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    // Make the new heap number the value restored into input's slot.
    __ StoreToSafepointRegisterSlot(tmp1, input);
  }

  __ bind(&done);
}
2784
2785
// Emits abs() for an integer value, in place; deoptimizes on overflow
// (abs(kMinInt) is not representable as int32).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ cmp(input, Operand(0));
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(input, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}
2796
2797
// Implements Math.abs. Doubles use vabs, int32 values use
// EmitIntegerMathAbs, and tagged values handle smis inline while
// deferring heap numbers to DoDeferredMathAbsTaggedHeapNumber.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    __ vabs(input, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2831
2832
// Implements Math.floor via round-to-minus-infinity VFP truncation;
// deoptimizes when the result is not exactly representable as int32 or,
// when required, when it would be -0.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  __ EmitVFPTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     scratch2);
  // The truncation was inexact or out of int32 range: bail out.
  DeoptimizeIf(ne, instr->environment());

  // Move the result back to a general purpose register.
  __ vmov(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0: a zero result whose input had the sign bit set.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}
2861
2862
// Implements Math.round via round-to-nearest VFP truncation; deoptimizes
// when the rounded value is not representable as int32 or, when required,
// when the result would be -0.
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch1 = scratch0();
  // NOTE(review): scratch2 aliases the result register here; this assumes
  // EmitVFPTruncate tolerates that aliasing — confirm against its contract.
  Register scratch2 = result;
  __ EmitVFPTruncate(kRoundToNearest,
                     double_scratch0().low(),
                     input,
                     scratch1,
                     scratch2);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0: a zero result whose input had the sign bit set.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}
2887
2888
// Implements Math.sqrt in place on the input double register.
void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input));
  __ vsqrt(input, input);
}
2894
2895
Steve Block44f0eee2011-05-26 01:26:41 +01002896void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2897 DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
2898 Register scratch = scratch0();
2899 SwVfpRegister single_scratch = double_scratch0().low();
2900 DoubleRegister double_scratch = double_scratch0();
2901 ASSERT(ToDoubleRegister(instr->result()).is(input));
2902
2903 // Add +0 to convert -0 to +0.
2904 __ mov(scratch, Operand(0));
2905 __ vmov(single_scratch, scratch);
2906 __ vcvt_f64_s32(double_scratch, single_scratch);
2907 __ vadd(input, input, double_scratch);
2908 __ vsqrt(input, input);
2909}
2910
2911
// Implements Math.pow by calling out to C. The exponent may be a double,
// an int32, or a tagged value; a tagged exponent is converted to a double
// first (untagging smis, loading heap-number values), deoptimizing on
// non-number inputs. Arguments are passed in r0..r3 per the soft-float
// convention set up by PrepareCallCFunction.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  Register scratch = scratch0();
  DoubleRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(4, scratch);
    __ vmov(r0, r1, ToDoubleRegister(left));
    __ vmov(r2, r3, ToDoubleRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 4);
  } else if (exponent_type.IsInteger32()) {
    ASSERT(ToRegister(right).is(r0));
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(4, scratch);
    __ mov(r2, ToRegister(right));
    __ vmov(r0, r1, ToDoubleRegister(left));
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 4);
  } else {
    ASSERT(exponent_type.IsTagged());
    ASSERT(instr->hydrogen()->left()->representation().IsDouble());

    Register right_reg = ToRegister(right);

    // Check for smi on the right hand side.
    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);

    // Untag smi and convert it to a double.
    __ SmiUntag(right_reg);
    SwVfpRegister single_scratch = double_scratch0().low();
    __ vmov(single_scratch, right_reg);
    __ vcvt_f64_s32(result_reg, single_scratch);
    __ jmp(&call);

    // Heap number map check.
    __ bind(&non_smi);
    __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(scratch, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    // Load the heap number's double value into result_reg.
    int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
    __ add(scratch, right_reg, Operand(value_offset));
    __ vldr(result_reg, scratch, 0);

    // Prepare arguments and call C function.
    __ bind(&call);
    __ PrepareCallCFunction(4, scratch);
    __ vmov(r0, r1, ToDoubleRegister(left));
    __ vmov(r2, r3, result_reg);
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 4);
  }
  // Store the result in the result register.
  __ GetCFunctionDoubleResult(result_reg);
}
2971
2972
// Implements Math.log via the transcendental cache stub; the stub's
// untagged variant expects/returns the value in d2.
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
2979
2980
// Implements Math.cos via the transcendental cache stub; the stub's
// untagged variant expects/returns the value in d2.
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
2987
2988
// Implements Math.sin via the transcendental cache stub; the stub's
// untagged variant expects/returns the value in d2.
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
2995
2996
Ben Murdochb0fe1622011-05-05 13:52:32 +01002997void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2998 switch (instr->op()) {
2999 case kMathAbs:
3000 DoMathAbs(instr);
3001 break;
3002 case kMathFloor:
3003 DoMathFloor(instr);
3004 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003005 case kMathRound:
3006 DoMathRound(instr);
3007 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003008 case kMathSqrt:
3009 DoMathSqrt(instr);
3010 break;
Steve Block44f0eee2011-05-26 01:26:41 +01003011 case kMathPowHalf:
3012 DoMathPowHalf(instr);
3013 break;
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003014 case kMathCos:
3015 DoMathCos(instr);
3016 break;
3017 case kMathSin:
3018 DoMathSin(instr);
3019 break;
3020 case kMathLog:
3021 DoMathLog(instr);
3022 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003023 default:
3024 Abort("Unimplemented type of LUnaryMathOperation.");
3025 UNREACHABLE();
3026 }
3027}
3028
3029
// Calls a property looked up by key through the keyed-call IC stub; the
// result arrives in r0 and the context register is restored afterwards.
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3039
3040
// Calls a named property through the call IC stub; the property name is
// passed in r2 and the result arrives in r0.
void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
      arity, NOT_IN_LOOP);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3052
3053
// Calls an arbitrary function value through CallFunctionStub; the result
// arrives in r0 and the context register is restored afterwards.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  // Drop the extra stack slot left over from the stub call (presumably
  // the function itself — confirm against CallFunctionStub).
  __ Drop(1);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3063
3064
// Calls a function found by name on the global object through the call
// IC stub, using CODE_TARGET_CONTEXT relocation; name in r2, result in r0.
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
3075
3076
// Calls a global function known at compile time: materializes it in r1
// (as CallKnownFunction expects) and dispatches through CallKnownFunction.
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->target()));
  CallKnownFunction(instr->target(), instr->arity(), instr);
}
3082
3083
// Emits a construct ('new') call through the JSConstructCall builtin;
// constructor in r1, argument count in r0, result in r0.
void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->result()).is(r0));

  Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
  __ mov(r0, Operand(instr->arity()));
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
}
3092
3093
// Calls into the C++ runtime for the instruction's runtime function.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3097
3098
// Stores a named field either in-object or in the out-of-object
// properties array, optionally installing a transitioned map first and
// emitting write barriers when required.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  if (!instr->transition().is_null()) {
    // Install the new (transitioned) map before storing the field.
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}
3129
3130
3131void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3132 ASSERT(ToRegister(instr->object()).is(r1));
3133 ASSERT(ToRegister(instr->value()).is(r0));
3134
3135 // Name is always in r2.
3136 __ mov(r2, Operand(instr->name()));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003137 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003138 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3139 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003140 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3141}
3142
3143
// Deoptimizes if index >= length.  The comparison is unsigned (hs), so a
// negative index also fails the check.
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
  DeoptimizeIf(hs, instr->environment());
}
3148
3149
// Stores a value into a fast-elements backing store (FixedArray).  The key
// is either a compile-time constant or a register; a write barrier is
// emitted when the hydrogen instruction requires it.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // A constant key must not need a write barrier: the barrier code below
    // relies on the element address computed in the register-key branch.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    // scratch holds the (untagged) address of the element's slot base.
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
3174
3175
// Stores a value into an external (typed) array.  Float arrays convert the
// double value to single precision via VFP; pixel arrays clamp the value
// to [0..255]; the remaining integer types use a store of matching width.
void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {

  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  ExternalArrayType array_type = instr->array_type();
  if (array_type == kExternalFloatArray) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister value(ToDoubleRegister(instr->value()));
    // Element address = base + key * 4 (32-bit float elements).
    __ add(scratch0(), external_pointer, Operand(key, LSL, 2));
    __ vcvt_f32_f64(double_scratch0().low(), value);
    __ vstr(double_scratch0().low(), scratch0(), 0);
  } else {
    Register value(ToRegister(instr->value()));
    switch (array_type) {
      case kExternalPixelArray:
        // Clamp the value to [0..255].
        __ Usat(value, 8, Operand(value));
        __ strb(value, MemOperand(external_pointer, key));
        break;
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ strb(value, MemOperand(external_pointer, key));
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ strh(value, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray:
        __ str(value, MemOperand(external_pointer, key, LSL, 2));
        break;
      case kExternalFloatArray:
        // Float arrays are handled by the VFP branch above.
        UNREACHABLE();
        break;
    }
  }
}
3214
3215
Ben Murdochb0fe1622011-05-05 13:52:32 +01003216void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3217 ASSERT(ToRegister(instr->object()).is(r2));
3218 ASSERT(ToRegister(instr->key()).is(r1));
3219 ASSERT(ToRegister(instr->value()).is(r0));
3220
Ben Murdoch8b112d22011-06-08 16:22:53 +01003221 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003222 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3223 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003224 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3225}
3226
3227
// Loads the character code at a given index of a string.  Flat sequential
// strings (one- and two-byte) are handled inline; cons strings whose
// second component is empty are unwrapped one level; everything else goes
// through the deferred runtime path.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  // Deferred code object for the runtime slow path.
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register scratch = scratch0();
  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  Label flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(eq, &flat_string);

  // Handle non-flat strings.  Only cons strings get special treatment;
  // other representations defer to the runtime.
  __ tst(result, Operand(kIsConsStringMask));
  __ b(eq, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, deferred->entry());

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ ldrh(result,
            FieldMemOperand(string,
                            SeqTwoByteString::kHeaderSize + 2 * const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
    __ ldrh(result, MemOperand(scratch, index, LSL, 1));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ ldrb(result, FieldMemOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
    __ ldrb(result, MemOperand(scratch, index));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}
3332
3333
// Slow path for DoStringCharCodeAt: calls Runtime::kStringCharCodeAt with
// (string, index-as-smi).  Registers are preserved via the safepoint
// register mechanism; the untagged result ends up in the result register's
// safepoint slot.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  __ SmiUntag(r0);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3364
3365
// Converts a character code to a single-character string.  Codes in the
// ASCII range are looked up in the single-character string cache; cache
// misses and out-of-range codes fall through to the deferred runtime call.
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  // Deferred code object for the runtime slow path.
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  // Codes above the ASCII range take the slow path.
  __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
  __ b(hi, deferred->entry());
  // Look the code up in the single-character string cache.
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
  // An undefined cache entry means a miss; defer to the runtime.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(result, ip);
  __ b(eq, deferred->entry());
  __ bind(deferred->exit());
}
3394
3395
// Slow path for DoStringCharFromCode: calls Runtime::kCharFromCode with
// the character code as a smi; the resulting string is written into the
// result register's safepoint slot.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  // Pass the character code as a smi argument.
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(r0, result);
}
3411
3412
Steve Block1e0659c2011-05-24 12:43:12 +01003413void LCodeGen::DoStringLength(LStringLength* instr) {
3414 Register string = ToRegister(instr->InputAt(0));
3415 Register result = ToRegister(instr->result());
3416 __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
3417}
3418
3419
Ben Murdochb0fe1622011-05-05 13:52:32 +01003420void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003421 LOperand* input = instr->InputAt(0);
Ben Murdochb8e0da22011-05-16 14:20:40 +01003422 ASSERT(input->IsRegister() || input->IsStackSlot());
3423 LOperand* output = instr->result();
3424 ASSERT(output->IsDoubleRegister());
3425 SwVfpRegister single_scratch = double_scratch0().low();
3426 if (input->IsStackSlot()) {
3427 Register scratch = scratch0();
3428 __ ldr(scratch, ToMemOperand(input));
3429 __ vmov(single_scratch, scratch);
3430 } else {
3431 __ vmov(single_scratch, ToRegister(input));
3432 }
3433 __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003434}
3435
3436
// Tags an int32 as a smi in place.  If the value does not fit in a smi,
// SmiTag's shift sets the overflow flag and control transfers to the
// deferred heap-number allocation path.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  // Deferred code object for the overflow (heap-number) path.
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  // SmiTag shifts left by one with condition codes set; overflow (vs)
  // means the value did not fit in the 31-bit smi range.
  __ SmiTag(reg, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}
3456
3457
// Slow path for DoNumberTagI: the int32 overflowed the smi range, so box
// it in a heap number (allocated inline when possible, otherwise via the
// runtime) and leave the tagged pointer in the input/result register.
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  DoubleRegister dbl_scratch = d0;
  SwVfpRegister flt_scratch = s0;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  // After untagging the overflowed SmiTag result, the sign bit is the
  // complement of the original value's sign bit; flipping bit 31
  // recovers the original int32.
  __ SmiUntag(reg);
  __ eor(reg, reg, Operand(0x80000000));
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ mov(ip, Operand(0));
  __ StoreToSafepointRegisterSlot(ip, reg);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  __ StoreToSafepointRegisterSlot(reg, reg);
}
3500
3501
// Boxes an untagged double into a freshly allocated heap number.  The
// allocation is attempted inline when enabled; otherwise (or on failure)
// the deferred code calls into the runtime.
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  // Deferred code object for the runtime-allocation slow path.
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double value into the allocated heap number.
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
}
3529
3530
// Slow path for DoNumberTagD: allocates the heap number via the runtime
// and writes the pointer into the result register's safepoint slot.
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ StoreToSafepointRegisterSlot(r0, reg);
}
3542
3543
3544void LCodeGen::DoSmiTag(LSmiTag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003545 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003546 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3547 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3548 __ SmiTag(ToRegister(input));
3549}
3550
3551
3552void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003553 LOperand* input = instr->InputAt(0);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003554 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3555 if (instr->needs_check()) {
3556 __ tst(ToRegister(input), Operand(kSmiTagMask));
3557 DeoptimizeIf(ne, instr->environment());
3558 }
3559 __ SmiUntag(ToRegister(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003560}
3561
3562
3563void LCodeGen::EmitNumberUntagD(Register input_reg,
3564 DoubleRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003565 bool deoptimize_on_undefined,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003566 LEnvironment* env) {
Steve Block9fac8402011-05-12 15:51:54 +01003567 Register scratch = scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003568 SwVfpRegister flt_scratch = s0;
3569 ASSERT(!result_reg.is(d0));
3570
3571 Label load_smi, heap_number, done;
3572
3573 // Smi check.
3574 __ tst(input_reg, Operand(kSmiTagMask));
3575 __ b(eq, &load_smi);
3576
3577 // Heap number map check.
Steve Block9fac8402011-05-12 15:51:54 +01003578 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003579 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01003580 __ cmp(scratch, Operand(ip));
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003581 if (deoptimize_on_undefined) {
3582 DeoptimizeIf(ne, env);
3583 } else {
3584 Label heap_number;
3585 __ b(eq, &heap_number);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003586
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003587 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
3588 __ cmp(input_reg, Operand(ip));
3589 DeoptimizeIf(ne, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003590
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003591 // Convert undefined to NaN.
3592 __ LoadRoot(ip, Heap::kNanValueRootIndex);
3593 __ sub(ip, ip, Operand(kHeapObjectTag));
3594 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3595 __ jmp(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003596
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003597 __ bind(&heap_number);
3598 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003599 // Heap number to double register conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003600 __ sub(ip, input_reg, Operand(kHeapObjectTag));
3601 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
3602 __ jmp(&done);
3603
3604 // Smi to double register conversion
3605 __ bind(&load_smi);
3606 __ SmiUntag(input_reg); // Untag smi before converting to float.
3607 __ vmov(flt_scratch, input_reg);
3608 __ vcvt_f64_s32(result_reg, flt_scratch);
3609 __ SmiTag(input_reg); // Retag smi.
3610 __ bind(&done);
3611}
3612
3613
// Deferred code object that forwards to DoDeferredTaggedToI for the slow
// (non-smi) path of tagged-to-int32 conversion.
class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};
3622
3623
// Slow path for DoTaggedToI: the input was not a smi.  In truncating mode
// heap numbers are converted with ECMA-262 ToInt32 truncation and
// undefined becomes zero; otherwise only heap numbers whose value is
// exactly representable as an int32 are accepted, and -0 deoptimizes when
// the hydrogen instruction bails out on minus zero.  The conversion is
// performed in place in the input register.
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_scratch = double_scratch0();
  SwVfpRegister single_scratch = double_scratch.low();

  ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
  ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));

  Label done;

  // Heap number map check.  The condition codes set here are consumed
  // inside both branches below.
  __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, Operand(ip));

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
    ASSERT(!scratch3.is(input_reg) &&
           !scratch3.is(scratch1) &&
           !scratch3.is(scratch2));
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);

    __ EmitECMATruncate(input_reg,
                        double_scratch2,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);

  } else {
    CpuFeatures::Scope scope(VFP3);
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
    // Round toward zero and deoptimize on any inexact conversion.
    __ EmitVFPTruncate(kRoundToZero,
                       single_scratch,
                       double_scratch,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    DeoptimizeIf(ne, instr->environment());
    // Load the result.
    __ vmov(input_reg, single_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result with the sign bit set in the original double
      // means -0; deoptimize in that case.
      __ cmp(input_reg, Operand(0));
      __ b(ne, &done);
      __ vmov(scratch1, double_scratch.high());
      __ tst(scratch1, Operand(HeapNumber::kSignMask));
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}
3697
3698
// Converts a tagged value to an int32 in place.  Smis are untagged
// inline; everything else goes through the deferred heap-number path.
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ tst(input_reg, Operand(kSmiTagMask));
  __ b(ne, deferred->entry());

  // Smi to int32 conversion
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}
3717
3718
3719void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003720 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003721 ASSERT(input->IsRegister());
3722 LOperand* result = instr->result();
3723 ASSERT(result->IsDoubleRegister());
3724
3725 Register input_reg = ToRegister(input);
3726 DoubleRegister result_reg = ToDoubleRegister(result);
3727
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003728 EmitNumberUntagD(input_reg, result_reg,
3729 instr->hydrogen()->deoptimize_on_undefined(),
3730 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003731}
3732
3733
3734void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01003735 Register result_reg = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01003736 Register scratch1 = scratch0();
3737 Register scratch2 = ToRegister(instr->TempAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01003738 DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
3739 DwVfpRegister double_scratch = double_scratch0();
3740 SwVfpRegister single_scratch = double_scratch0().low();
Steve Block1e0659c2011-05-24 12:43:12 +01003741
Steve Block44f0eee2011-05-26 01:26:41 +01003742 Label done;
Steve Block1e0659c2011-05-24 12:43:12 +01003743
Steve Block44f0eee2011-05-26 01:26:41 +01003744 if (instr->truncating()) {
3745 Register scratch3 = ToRegister(instr->TempAt(1));
3746 __ EmitECMATruncate(result_reg,
3747 double_input,
3748 single_scratch,
3749 scratch1,
3750 scratch2,
3751 scratch3);
3752 } else {
3753 VFPRoundingMode rounding_mode = kRoundToMinusInf;
3754 __ EmitVFPTruncate(rounding_mode,
3755 single_scratch,
3756 double_input,
3757 scratch1,
3758 scratch2,
3759 kCheckForInexactConversion);
3760 // Deoptimize if we had a vfp invalid exception,
3761 // including inexact operation.
Steve Block1e0659c2011-05-24 12:43:12 +01003762 DeoptimizeIf(ne, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01003763 // Retrieve the result.
3764 __ vmov(result_reg, single_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01003765 }
Steve Block44f0eee2011-05-26 01:26:41 +01003766 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003767}
3768
3769
// Deoptimizes if the input is not a smi (low-order tag bit set).
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(ne, instr->environment());
}
3775
3776
// Deoptimizes if the input IS a smi (low-order tag bit clear).
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
}
3782
3783
// Deoptimizes unless the object's instance type lies in the inclusive
// range [first, last] recorded on the hydrogen instruction.
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();
  InstanceType first = instr->hydrogen()->first();
  InstanceType last = instr->hydrogen()->last();

  // Load the instance type from the object's map.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(first));

  // If there is only one type in the interval check for equality.
  if (first == last) {
    DeoptimizeIf(ne, instr->environment());
  } else {
    // Below the lower bound: deoptimize.
    DeoptimizeIf(lo, instr->environment());
    // Omit check for the last type.
    if (last != LAST_TYPE) {
      __ cmp(scratch, Operand(last));
      DeoptimizeIf(hi, instr->environment());
    }
  }
}
3806
3807
3808void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003809 ASSERT(instr->InputAt(0)->IsRegister());
3810 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003811 __ cmp(reg, Operand(instr->hydrogen()->target()));
3812 DeoptimizeIf(ne, instr->environment());
3813}
3814
3815
// Deoptimizes unless the object's map is exactly the map recorded at
// compile time.
void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}
3825
3826
// Loads a handle to a heap object into |result|.  New-space objects may
// move during GC, so they are referenced indirectly through a global
// property cell; old-space objects are embedded directly in the code.
void LCodeGen::LoadHeapObject(Register result,
                              Handle<HeapObject> object) {
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand(cell));
    __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(result, Operand(object));
  }
}
3838
3839
// Deoptimizes if any object on the prototype chain, from the instruction's
// prototype up to and including the holder, has a map different from the
// one observed at compile time.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    // Walk the chain at compile time; the loop is fully unrolled in the
    // generated code.
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}
3866
3867
3868void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003869 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3870 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
3871 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
3872 __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
3873 __ Push(r3, r2, r1);
3874
3875 // Pick the right runtime function or stub to call.
3876 int length = instr->hydrogen()->length();
3877 if (instr->hydrogen()->IsCopyOnWrite()) {
3878 ASSERT(instr->hydrogen()->depth() == 1);
3879 FastCloneShallowArrayStub::Mode mode =
3880 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3881 FastCloneShallowArrayStub stub(mode, length);
3882 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3883 } else if (instr->hydrogen()->depth() > 1) {
3884 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
3885 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3886 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
3887 } else {
3888 FastCloneShallowArrayStub::Mode mode =
3889 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3890 FastCloneShallowArrayStub stub(mode, length);
3891 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3892 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003893}
3894
3895
3896void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
Steve Block9fac8402011-05-12 15:51:54 +01003897 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3898 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
3899 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
3900 __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
3901 __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3902 __ Push(r4, r3, r2, r1);
3903
3904 // Pick the right runtime function to call.
3905 if (instr->hydrogen()->depth() > 1) {
3906 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3907 } else {
3908 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3909 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003910}
3911
3912
// Transitions the object to fast-properties mode via the runtime.  The
// register allocator pins the input to r0; the runtime result is also
// returned in r0.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
3918
3919
// Materializes a regexp literal by cloning its boilerplate JSRegExp.
// If the literal slot is still undefined, the boilerplate is first created
// via the runtime.  The clone is allocated in new space (falling back to a
// runtime allocation) and filled by a word-wise copy of the boilerplate.
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  // Skip the runtime materialization if the literal slot already holds a
  // regexp (i.e. is not the undefined sentinel).
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  // Slow path: allocate via the runtime, preserving the boilerplate (r1)
  // on the stack across the call.
  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  if ((size % (2 * kPointerSize)) != 0) {
    // Copy the odd trailing word when size is not a multiple of two words.
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
3973
3974
3975void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003976 // Use the fast case closure allocation code that allocates in new
3977 // space for nested functions that don't need literals cloning.
3978 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
Steve Block1e0659c2011-05-24 12:43:12 +01003979 bool pretenure = instr->hydrogen()->pretenure();
Steve Block44f0eee2011-05-26 01:26:41 +01003980 if (!pretenure && shared_info->num_literals() == 0) {
3981 FastNewClosureStub stub(
3982 shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003983 __ mov(r1, Operand(shared_info));
3984 __ push(r1);
3985 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3986 } else {
3987 __ mov(r2, Operand(shared_info));
3988 __ mov(r1, Operand(pretenure
Steve Block44f0eee2011-05-26 01:26:41 +01003989 ? factory()->true_value()
3990 : factory()->false_value()));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003991 __ Push(cp, r2, r1);
3992 CallRuntime(Runtime::kNewClosure, 3, instr);
3993 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003994}
3995
3996
3997void LCodeGen::DoTypeof(LTypeof* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003998 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003999 __ push(input);
4000 CallRuntime(Runtime::kTypeof, 1, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004001}
4002
4003
4004void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004005 Register input = ToRegister(instr->InputAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01004006 Register result = ToRegister(instr->result());
4007 Label true_label;
4008 Label false_label;
4009 Label done;
4010
4011 Condition final_branch_condition = EmitTypeofIs(&true_label,
4012 &false_label,
4013 input,
4014 instr->type_literal());
4015 __ b(final_branch_condition, &true_label);
4016 __ bind(&false_label);
4017 __ LoadRoot(result, Heap::kFalseValueRootIndex);
4018 __ b(&done);
4019
4020 __ bind(&true_label);
4021 __ LoadRoot(result, Heap::kTrueValueRootIndex);
4022
4023 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004024}
4025
4026
4027void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01004028 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004029 int true_block = chunk_->LookupDestination(instr->true_block_id());
4030 int false_block = chunk_->LookupDestination(instr->false_block_id());
4031 Label* true_label = chunk_->GetAssemblyLabel(true_block);
4032 Label* false_label = chunk_->GetAssemblyLabel(false_block);
4033
4034 Condition final_branch_condition = EmitTypeofIs(true_label,
4035 false_label,
4036 input,
4037 instr->type_literal());
4038
4039 EmitBranch(true_block, false_block, final_branch_condition);
4040}
4041
4042
// Emits the test for "typeof input == type_name".  Jumps directly to
// true_label/false_label where the answer is already decided; otherwise
// falls through with the condition flags set so that the returned
// condition holds exactly when the typeof test succeeds.  For unknown
// type names the result is statically false (unconditional jump).
// NOTE: clobbers |input|, scratch0() and ip.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis and heap numbers are "number".
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    // Non-smi, instance type below FIRST_NONSTRING_TYPE, and not
    // undetectable (tst leaves eq when the undetectable bit is clear).
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // Only the true and false oddballs are "boolean".
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // Instance types at or above FIRST_FUNCTION_CLASS_TYPE are "function".
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    // null is "object"; otherwise the instance type must lie in
    // [FIRST_JS_OBJECT_TYPE, FIRST_FUNCTION_CLASS_TYPE) and the object
    // must not be undetectable.
    __ JumpIfSmi(input, false_label);
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ b(eq, true_label);
    __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE);
    __ b(hs, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else {
    // Unknown type literal: statically false.
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
4106
4107
Steve Block1e0659c2011-05-24 12:43:12 +01004108void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
4109 Register result = ToRegister(instr->result());
4110 Label true_label;
4111 Label false_label;
4112 Label done;
4113
4114 EmitIsConstructCall(result, scratch0());
4115 __ b(eq, &true_label);
4116
4117 __ LoadRoot(result, Heap::kFalseValueRootIndex);
4118 __ b(&done);
4119
4120
4121 __ bind(&true_label);
4122 __ LoadRoot(result, Heap::kTrueValueRootIndex);
4123
4124 __ bind(&done);
4125}
4126
4127
4128void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4129 Register temp1 = ToRegister(instr->TempAt(0));
4130 int true_block = chunk_->LookupDestination(instr->true_block_id());
4131 int false_block = chunk_->LookupDestination(instr->false_block_id());
4132
4133 EmitIsConstructCall(temp1, scratch0());
4134 EmitBranch(true_block, false_block, eq);
4135}
4136
4137
// Sets the condition flags so that "eq" holds exactly when the current
// function's calling frame is a CONSTRUCT frame (i.e. the function was
// invoked via "new").  Clobbers both temp registers.
void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  // Adaptor frame present: look one frame further up.
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame; leaves eq set iff CONSTRUCT.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4155
4156
// Intentionally emits no code.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
4161
4162
// Unconditional deoptimization: condition "al" (always) makes DeoptimizeIf
// emit an unconditional jump to the deoptimization entry for this
// instruction's environment.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}
4166
4167
4168void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01004169 Register object = ToRegister(instr->object());
4170 Register key = ToRegister(instr->key());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004171 Register strict = scratch0();
4172 __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
4173 __ Push(object, key, strict);
Steve Block1e0659c2011-05-24 12:43:12 +01004174 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4175 LPointerMap* pointers = instr->pointer_map();
4176 LEnvironment* env = instr->deoptimization_environment();
4177 RecordPosition(pointers->position());
4178 RegisterEnvironmentForDeoptimization(env);
Ben Murdochb8e0da22011-05-16 14:20:40 +01004179 SafepointGenerator safepoint_generator(this,
Steve Block1e0659c2011-05-24 12:43:12 +01004180 pointers,
4181 env->deoptimization_index());
Ben Murdochb8e0da22011-05-16 14:20:40 +01004182 __ InvokeBuiltin(Builtins::DELETE, CALL_JS, &safepoint_generator);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004183}
4184
4185
4186void LCodeGen::DoStackCheck(LStackCheck* instr) {
4187 // Perform stack overflow check.
4188 Label ok;
4189 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
4190 __ cmp(sp, Operand(ip));
4191 __ b(hs, &ok);
4192 StackCheckStub stub;
4193 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4194 __ bind(&ok);
4195}
4196
4197
// On-stack-replacement entry point.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  // Only one OSR entry point per compiled function is expected.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
4213
4214
4215#undef __
4216
4217} } // namespace v8::internal