// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "arm/lithium-codegen-arm.h"
#include "arm/lithium-gap-resolver-arm.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

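// Emits the safepoint bookkeeping around a call: BeforeCall pads the code so
// the deoptimizer can patch the call sequence, and AfterCall records the
// safepoint with the given pointer map and deoptimization index.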
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) {
    ASSERT(call_size >= 0);
    // Ensure that we have enough space after the previous safepoint position
    // for the generated code there.
    int call_end = codegen_->masm()->pc_offset() + call_size;
    int prev_jump_end =
        codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
    if (call_end < prev_jump_end) {
      int padding_size = prev_jump_end - call_end;
      ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
      while (padding_size > 0) {
        codegen_->masm()->nop();
        padding_size -= Assembler::kInstrSize;
      }
    }
  }

  virtual void AfterCall() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope1(VFP3);
  CpuFeatures::Scope scope2(ARMv7);
  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}

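// Builds the standard JS frame (saves r1, cp, fp and lr, then points fp at
// the saved fp), zaps or reserves the spill slots, and allocates a local
// heap context if the function needs one.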
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop_at");
  }
#endif

  // r1: Callee's JS function.
  // cp: Callee's context.
  // fp: Caller's frame pointer.
  // lr: Caller's pc.

  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));  // Adjust FP to point to saved FP.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(r0, Operand(slots));
      __ mov(r2, Operand(kSlotsZapValue));
      Label loop;
      __ bind(&loop);
      __ push(r2);
      __ sub(r0, r0, Operand(1), SetCC);
      __ b(ne, &loop);
    } else {
      __ sub(sp, sp, Operand(slots * kPointerSize));
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is in r1.
    __ push(r1);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
    // Context is returned in both r0 and cp. It replaces the context
    // passed to us. It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
        __ str(r0, MemOperand(cp, r1));
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use two more registers to avoid
        // clobbering cp.
        __ mov(r2, Operand(cp));
        __ RecordWrite(r2, Operand(r1), r3, r0);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Force constant pool emission at the end of deferred code to make
  // sure that no constant pools are emitted after the official end of
  // the instruction sequence.
  masm()->CheckConstPool(true, false);

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    __ mov(scratch, ToOperand(op));
    return scratch;
  } else if (op->IsStackSlot() || op->IsArgument()) {
    __ ldr(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                SwVfpRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      __ mov(ip, Operand(static_cast<int32_t>(literal->Number())));
      __ vmov(flt_scratch, ip);
      __ vcvt_f64_s32(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort("unsupported double immediate");
    } else if (r.IsTagged()) {
      Abort("unsupported tagged immediate");
    }
  } else if (op->IsStackSlot() || op->IsArgument()) {
    // TODO(regis): Why is vldr not taking a MemOperand?
    // __ vldr(dbl_scratch, ToMemOperand(op));
    MemOperand mem_op = ToMemOperand(op);
    __ vldr(dbl_scratch, mem_op.rn(), mem_op.offset());
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    Handle<Object> literal = chunk_->LookupLiteral(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
      ASSERT(literal->IsNumber());
      return Operand(static_cast<int32_t>(literal->Number()));
    } else if (r.IsDouble()) {
      Abort("ToOperand Unsupported double immediate.");
    }
    ASSERT(r.IsTagged());
    return Operand(literal);
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort("ToOperand IsDoubleRegister unimplemented");
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return MemOperand(fp, -(index - 1) * kPointerSize);
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, context,
    // and the first word of the double in the fixed part of the frame.
    return MemOperand(fp, -(index + 3) * kPointerSize + kPointerSize);
  } else {
    // Incoming parameter. Skip the return address and the first word of
    // the double.
    return MemOperand(fp, -(index - 1) * kPointerSize + kPointerSize);
  }
}

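// Recursively records a deoptimization translation for this environment and
// its outer environments: one BeginFrame per frame followed by one entry per
// environment value.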
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    DoubleRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ Call(code, mode);
  RegisterLazyDeoptimization(instr, safepoint_mode);
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr) {
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
                                          SafepointMode safepoint_mode) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution can resume
  // from a previous bailout point, repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(),
                    deoptimization_environment->deoptimization_index());
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(),
        0,
        deoptimization_environment->deoptimization_index());
  }
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}

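// Deoptimizes if the given condition holds (or unconditionally for al) by
// jumping to the eager deoptimization entry registered for this environment.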
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  ASSERT(FLAG_deopt_every_n_times < 2);  // Other values not supported on ARM.

  if (FLAG_deopt_every_n_times == 1 &&
      info_->shared_info()->opt_count() == id) {
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
    return;
  }

  if (cc == al) {
    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
    __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ b(&done, NegateCondition(cc));
      __ stop("trap_on_deopt");
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}

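// Returns the index of a literal in the deoptimization literal array, adding
// it at the end if it is not already present.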
int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  ASSERT(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register cp always contains a pointer to the context.
    safepoint.DefinePointerRegister(cp);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepoint(int deoptimization_index) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegistersAndDoubles(
    LPointerMap* pointers,
    int arguments,
    int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      __ ldr(r0, MemOperand(sp, 0));
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}

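// Emits code for integer modulus. Power-of-two divisors are reduced with a
// mask, small nonnegative operands with a short subtraction loop, and the
// general case with VFP double arithmetic.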
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ cmp(dividend, Operand(0));
    __ b(pl, &positive_dividend);
    __ rsb(dividend, dividend, Operand(0));
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ rsb(dividend, dividend, Operand(0), SetCC);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ b(ne, &done);
      DeoptimizeIf(al, instr->environment());
    }
    // The remainder of a negative dividend is now in the result register;
    // do not fall through into the positive-dividend masking below.
    __ b(&done);
    __ bind(&positive_dividend);
    __ and_(dividend, dividend, Operand(divisor - 1));
    __ bind(&done);
    return;
  }

  // These registers hold untagged 32 bit values.
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  Register scratch = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister dividend = ToDoubleRegister(instr->TempAt(1));
  DwVfpRegister divisor = ToDoubleRegister(instr->TempAt(2));
  DwVfpRegister quotient = double_scratch0();

  ASSERT(result.is(left));

  ASSERT(!dividend.is(divisor));
  ASSERT(!dividend.is(quotient));
  ASSERT(!divisor.is(quotient));
  ASSERT(!scratch.is(left));
  ASSERT(!scratch.is(right));
  ASSERT(!scratch.is(result));

  Label done, vfp_modulo, both_positive, right_negative;

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // (0 % x) must yield 0 (if x is finite, which is the case here).
  __ cmp(left, Operand(0));
  __ b(eq, &done);
  // Preload right in a vfp register.
  __ vmov(divisor.low(), right);
  __ b(lt, &vfp_modulo);

  __ cmp(left, Operand(right));
  __ b(lt, &done);

  // Check for (positive) power of two on the right hand side.
  __ JumpIfNotPowerOfTwoOrZeroAndNeg(right,
                                     scratch,
                                     &right_negative,
                                     &both_positive);
  // Perform modulo operation (scratch contains right - 1).
  __ and_(result, scratch, Operand(left));
  __ b(&done);

  __ bind(&right_negative);
  // Negate right. The sign of the divisor does not matter.
  __ rsb(right, right, Operand(0));

  __ bind(&both_positive);
  const int kUnfolds = 3;
  // If the (nonnegative) left hand side is smaller than the right hand
  // side, the left hand side is the result.
  // Otherwise try a few subtractions of the right hand side.
  __ mov(scratch, left);
  for (int i = 0; i < kUnfolds; i++) {
    // Check if the left hand side is less than the
    // right hand side.
    __ cmp(scratch, Operand(right));
    __ mov(result, scratch, LeaveCC, lt);
    __ b(lt, &done);
    // If not, reduce the left hand side by the right hand
    // side and check again.
    if (i < kUnfolds - 1) __ sub(scratch, scratch, right);
  }

  __ bind(&vfp_modulo);
  // Load the arguments in VFP registers.
  // The divisor value is preloaded before. Be careful that 'right' is only
  // live on entry.
  __ vmov(dividend.low(), left);
  // From here on don't use right as it may have been reallocated (for example
  // to scratch2).
  right = no_reg;

  __ vcvt_f64_s32(dividend, dividend.low());
  __ vcvt_f64_s32(divisor, divisor.low());

  // We do not care about the sign of the divisor.
  __ vabs(divisor, divisor);
  // Compute the quotient and round it to a 32bit integer.
  __ vdiv(quotient, dividend, divisor);
  __ vcvt_s32_f64(quotient.low(), quotient);
  __ vcvt_f64_s32(quotient, quotient.low());

  // Compute the remainder in result.
  DwVfpRegister double_scratch = dividend;
  __ vmul(double_scratch, divisor, quotient);
  __ vcvt_s32_f64(double_scratch.low(), double_scratch);
  __ vmov(scratch, double_scratch.low());

  if (!instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ sub(result, left, scratch);
  } else {
    Label ok;
    // Check for -0.
    __ sub(scratch2, left, scratch, SetCC);
    __ b(ne, &ok);
    __ cmp(left, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&ok);
    // Load the result and we are done.
    __ mov(result, scratch2);
  }

  __ bind(&done);
}

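// Emits code for integer division. Division by zero, (0 / -x) and
// (kMinInt / -1) deoptimize when the corresponding flags are set; divisors
// 1, 2 and 4 are handled inline, everything else in deferred code via the
// generic binary-op stub on tagged Smis.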
void LCodeGen::DoDivI(LDivI* instr) {
  class DeferredDivI: public LDeferredCode {
   public:
    DeferredDivI(LCodeGen* codegen, LDivI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredBinaryOpStub(instr_, Token::DIV);
    }
   private:
    LDivI* instr_;
  };

  const Register left = ToRegister(instr->InputAt(0));
  const Register right = ToRegister(instr->InputAt(1));
  const Register scratch = scratch0();
  const Register result = ToRegister(instr->result());

  // Check for x / 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ cmp(right, Operand(0));
    DeoptimizeIf(eq, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ cmp(left, Operand(0));
    __ b(ne, &left_not_zero);
    __ cmp(right, Operand(0));
    DeoptimizeIf(mi, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left, Operand(kMinInt));
    __ b(ne, &left_not_min_int);
    __ cmp(right, Operand(-1));
    DeoptimizeIf(eq, instr->environment());
    __ bind(&left_not_min_int);
  }

  Label done, deoptimize;
  // Test for a few common cases first.
  __ cmp(right, Operand(1));
  __ mov(result, left, LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(2));
  __ tst(left, Operand(1), eq);
  __ mov(result, Operand(left, ASR, 1), LeaveCC, eq);
  __ b(eq, &done);

  __ cmp(right, Operand(4));
  __ tst(left, Operand(3), eq);
  __ mov(result, Operand(left, ASR, 2), LeaveCC, eq);
  __ b(eq, &done);

  // Call the stub. The numbers in r0 and r1 have
  // to be tagged as Smis. If that is not possible, deoptimize.
  DeferredDivI* deferred = new DeferredDivI(this, instr);

  __ TrySmiTag(left, &deoptimize, scratch);
  __ TrySmiTag(right, &deoptimize, scratch);

  __ b(al, deferred->entry());
  __ bind(deferred->exit());

  // If the result in r0 is a Smi, untag it, else deoptimize.
  __ JumpIfNotSmi(result, &deoptimize);
  __ SmiUntag(result);
  __ b(&done);

  __ bind(&deoptimize);
  DeoptimizeIf(al, instr->environment());
  __ bind(&done);
}


template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
                                      Token::Value op) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegistersAndDoubles);
  // Move left to r1 and right to r0 for the stub call.
  if (left.is(r1)) {
    __ Move(r0, right);
  } else if (left.is(r0) && right.is(r1)) {
    __ Swap(r0, r1, r2);
  } else if (left.is(r0)) {
    ASSERT(!right.is(r1));
    __ mov(r1, r0);
    __ mov(r0, right);
  } else {
    ASSERT(!left.is(r0) && !right.is(r0));
    __ mov(r0, right);
    __ mov(r1, left);
  }
  TypeRecordingBinaryOpStub stub(op, OVERWRITE_LEFT);
  __ CallStub(&stub);
  RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
                                         0,
                                         Safepoint::kNoDeoptimizationIndex);
  // Overwrite the stored value of r0 with the result of the stub.
  __ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}

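// Emits code for integer multiplication, deoptimizing on overflow and, when
// required, on a result of negative zero.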
void LCodeGen::DoMulI(LMulI* instr) {
  Register scratch = scratch0();
  Register left = ToRegister(instr->InputAt(0));
  Register right = EmitLoadRegister(instr->InputAt(1), scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
      !instr->InputAt(1)->IsConstantOperand()) {
    __ orr(ToRegister(instr->TempAt(0)), left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    // scratch:left = left * right.
    __ smull(left, scratch, left, right);
    __ mov(ip, Operand(left, ASR, 31));
    __ cmp(ip, Operand(scratch));
    DeoptimizeIf(ne, instr->environment());
  } else {
    __ mul(left, left, right);
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ cmp(left, Operand(0));
    __ b(ne, &done);
    if (instr->InputAt(1)->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(instr->InputAt(1))) <= 0) {
        DeoptimizeIf(al, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ cmp(ToRegister(instr->TempAt(0)), Operand(0));
      DeoptimizeIf(mi, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  Operand right_operand(no_reg);

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    right_operand = Operand(right_reg);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    right_operand = ToOperand(right);
  }

  switch (instr->op()) {
    case Token::BIT_AND:
      __ and_(result, ToRegister(left), right_operand);
      break;
    case Token::BIT_OR:
      __ orr(result, ToRegister(left), right_operand);
      break;
    case Token::BIT_XOR:
      __ eor(result, ToRegister(left), right_operand);
      break;
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  Register scratch = scratch0();
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  Register result = ToRegister(left);
  if (right->IsRegister()) {
    // Mask the right operand.
    __ and_(scratch, ToRegister(right), Operand(0x1F));
    switch (instr->op()) {
      case Token::SAR:
        __ mov(result, Operand(result, ASR, scratch));
        break;
      case Token::SHR:
        if (instr->can_deopt()) {
          __ mov(result, Operand(result, LSR, scratch), SetCC);
          DeoptimizeIf(mi, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, scratch));
        }
        break;
      case Token::SHL:
        __ mov(result, Operand(result, LSL, scratch));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ mov(result, Operand(result, ASR, shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ tst(result, Operand(0x80000000));
          DeoptimizeIf(ne, instr->environment());
        } else {
          __ mov(result, Operand(result, LSR, shift_count));
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ mov(result, Operand(result, LSL, shift_count));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    __ sub(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ sub(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  DwVfpRegister result = ToDoubleRegister(instr->result());
  double v = instr->value();
  __ vmov(result, v);
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ mov(ToRegister(instr->result()), Operand(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  Label done;

  // If the object is a smi, return the object.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, &done);

  // If the object is not a value type, return the object.
  __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
  __ b(ne, &done);
  __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ mvn(ToRegister(input), Operand(ToRegister(input)));
}


void LCodeGen::DoThrow(LThrow* instr) {
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ push(input_reg);
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    __ stop("Unreachable code.");
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  SBit set_cond = can_overflow ? SetCC : LeaveCC;

  if (right->IsStackSlot() || right->IsArgument()) {
    Register right_reg = EmitLoadRegister(right, ip);
    __ add(ToRegister(left), ToRegister(left), Operand(right_reg), set_cond);
  } else {
    ASSERT(right->IsRegister() || right->IsConstantOperand());
    __ add(ToRegister(left), ToRegister(left), ToOperand(right), set_cond);
  }

  if (can_overflow) {
    DeoptimizeIf(vs, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  DoubleRegister left = ToDoubleRegister(instr->InputAt(0));
  DoubleRegister right = ToDoubleRegister(instr->InputAt(1));
  switch (instr->op()) {
    case Token::ADD:
      __ vadd(left, left, right);
      break;
    case Token::SUB:
      __ vsub(left, left, right);
      break;
    case Token::MUL:
      __ vmul(left, left, right);
      break;
    case Token::DIV:
      __ vdiv(left, left, right);
      break;
    case Token::MOD: {
      // Save r0-r3 on the stack.
      __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());

      __ PrepareCallCFunction(4, scratch0());
      __ vmov(r0, r1, left);
      __ vmov(r2, r3, right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()), 4);
      // Move the result into the double result register.
      __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));

      // Restore r0-r3.
      __ ldm(ia_w, sp, r0.bit() | r1.bit() | r2.bit() | r3.bit());
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->InputAt(1)).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}

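// Emits a conditional branch to the given blocks, falling through instead of
// branching whenever the target block is the next one to be emitted.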
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ b(cc, chunk_->GetAssemblyLabel(left_block));
    __ b(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ cmp(reg, Operand(0));
    EmitBranch(true_block, false_block, ne);
  } else if (r.IsDouble()) {
    DoubleRegister reg = ToDoubleRegister(instr->InputAt(0));
    Register scratch = scratch0();

    // Test the double value. Zero and NaN are false.
    __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
    __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
    // Neither flag set means the value is non-zero and ordered, i.e. true.
    EmitBranch(true_block, false_block, eq);
1416 } else {
1417 ASSERT(r.IsTagged());
Steve Block1e0659c2011-05-24 12:43:12 +01001418 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001419 if (instr->hydrogen()->type().IsBoolean()) {
1420 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1421 __ cmp(reg, ip);
1422 EmitBranch(true_block, false_block, eq);
1423 } else {
1424 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1425 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1426
1427 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1428 __ cmp(reg, ip);
1429 __ b(eq, false_label);
1430 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1431 __ cmp(reg, ip);
1432 __ b(eq, true_label);
1433 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
1434 __ cmp(reg, ip);
1435 __ b(eq, false_label);
1436 __ cmp(reg, Operand(0));
1437 __ b(eq, false_label);
1438 __ tst(reg, Operand(kSmiTagMask));
1439 __ b(eq, true_label);
1440
Ben Murdochb8e0da22011-05-16 14:20:40 +01001441 // Test double values. Zero and NaN are false.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001442 Label call_stub;
1443 DoubleRegister dbl_scratch = d0;
Steve Block9fac8402011-05-12 15:51:54 +01001444 Register scratch = scratch0();
1445 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001446 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block9fac8402011-05-12 15:51:54 +01001447 __ cmp(scratch, Operand(ip));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001448 __ b(ne, &call_stub);
1449 __ sub(ip, reg, Operand(kHeapObjectTag));
1450 __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
Ben Murdochb8e0da22011-05-16 14:20:40 +01001451 __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
1452 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
Ben Murdoch086aeea2011-05-13 15:57:08 +01001453 __ b(ne, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001454 __ b(true_label);
1455
1456 // The conversion stub doesn't cause garbage collections so it's
1457 // safe to not record a safepoint after the call.
1458 __ bind(&call_stub);
1459 ToBooleanStub stub(reg);
1460 RegList saved_regs = kJSCallerSaved | kCalleeSaved;
1461 __ stm(db_w, sp, saved_regs);
1462 __ CallStub(&stub);
1463 __ cmp(reg, Operand(0));
1464 __ ldm(ia_w, sp, saved_regs);
Steve Block1e0659c2011-05-24 12:43:12 +01001465 EmitBranch(true_block, false_block, ne);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001466 }
1467 }
1468}
1469
1470
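// Emits an unconditional jump to |block| unless it is the next block in
// emission order. If a deferred stack check is given, the stack limit is
// tested first and the deferred code is entered when the stack pointer has
// gone below the limit.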
void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}


void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}


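// Maps a comparison token to the ARM condition to branch on. The unsigned
// variants (lo/hi/ls/hs) are used for double comparisons, where the VFP
// flags are read with the unsigned conditions; the callers branch away on
// unordered (V set) results first.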
Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = kNoCondition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = eq;
      break;
    case Token::LT:
      cond = is_unsigned ? lo : lt;
      break;
    case Token::GT:
      cond = is_unsigned ? hi : gt;
      break;
    case Token::LTE:
      cond = is_unsigned ? ls : le;
      break;
    case Token::GTE:
      cond = is_unsigned ? hs : ge;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  __ cmp(ToRegister(left), ToRegister(right));
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();
  Register scratch = scratch0();

  Label unordered, done;
  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to unordered to return false.
    __ b(vs, &unordered);
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
  __ b(cc, &done);

  __ bind(&unordered);
  __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Compare left and right as doubles and load the
    // resulting flags into the normal status register.
    __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right));
    // If a NaN is involved, i.e. the result is unordered (V set),
    // jump to false block label.
    __ b(vs, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
    __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
  } else {
    Label true_value, false_value, done;
    __ b(eq, &true_value);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(ip, reg);
    __ b(eq, &true_value);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    __ b(ne, &true_value);
    __ bind(&false_value);
    __ LoadRoot(result, Heap::kFalseValueRootIndex);
    __ jmp(&done);
    __ bind(&true_value);
    __ LoadRoot(result, Heap::kTrueValueRootIndex);
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register scratch = scratch0();
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(reg, ip);
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, eq);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(reg, ip);
    __ b(eq, true_label);
    __ tst(reg, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, ne);
  }
}


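// Tests whether |input| is an object in the sense of typeof. Null counts as
// an object; smis and undetectable objects do not. Branches to the given
// labels for the cases decided here and otherwise falls through, leaving
// the final instance type range check to the returned condition.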
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  __ LoadRoot(temp1, Heap::kNullValueRootIndex);
  __ cmp(input, temp1);
  __ b(eq, is_object);

  // Load map.
  __ ldr(temp1, FieldMemOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kBitFieldOffset));
  __ tst(temp2, Operand(1 << Map::kIsUndetectable));
  __ b(ne, is_not_object);

  // Load instance type and check that it is in object type range.
  __ ldrb(temp2, FieldMemOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, is_not_object);
  __ cmp(temp2, Operand(LAST_JS_OBJECT_TYPE));
  return le;
}


void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = scratch0();
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ b(true_cond, &is_true);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond =
      EmitIsObject(reg, temp1, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Register result = ToRegister(instr->result());
  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  Label done;
  __ b(eq, &done);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Register input_reg = EmitLoadRegister(instr->InputAt(0), ip);
  __ tst(input_reg, Operand(kSmiTagMask));
  EmitBranch(true_block, false_block, eq);
}


static InstanceType TestType(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}


static Condition BranchCondition(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return eq;
  if (to == LAST_TYPE) return hs;
  if (from == FIRST_TYPE) return ls;
  UNREACHABLE();
  return eq;
}


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  Label done;
  __ tst(input, Operand(kSmiTagMask));
  __ LoadRoot(result, Heap::kFalseValueRootIndex, eq);
  __ b(eq, &done);
  __ CompareObjectType(input, result, result, TestType(instr->hydrogen()));
  Condition cond = BranchCondition(instr->hydrogen());
  __ LoadRoot(result, Heap::kTrueValueRootIndex, cond);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, NegateCondition(cond));
  __ bind(&done);
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register scratch = scratch0();
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, false_label);

  __ CompareObjectType(input, scratch, scratch, TestType(instr->hydrogen()));
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  if (FLAG_debug_code) {
    __ AbortIfNotString(input);
  }

  __ ldr(result, FieldMemOperand(input, String::kHashFieldOffset));
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
  __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ ldr(scratch,
         FieldMemOperand(input, String::kHashFieldOffset));
  __ tst(scratch, Operand(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, eq);
}


// Branches to a label or falls through with the answer in flags. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  __ tst(input, Operand(kSmiTagMask));
  __ b(eq, is_false);
  __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
  __ b(lt, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ b(eq, is_true);
  } else {
    __ b(eq, is_false);
  }

  // Check if the constructor in the map is a function.
  __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ b(ne, is_true);
  } else {
    __ b(ne, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(temp, FieldMemOperand(temp,
                               SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, Operand(class_name));
  // End with the answer in flags.
}


void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Handle<String> class_name = instr->hydrogen()->class_name();

  Label done, is_true, is_false;

  EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
  __ b(ne, &is_false);

  __ bind(&is_true);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);
  __ jmp(&done);

  __ bind(&is_false);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = scratch0();
  Register temp2 = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = instr->true_block_id();
  int false_block = instr->false_block_id();

  __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(temp, Operand(instr->map()));
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  __ cmp(r0, Operand(0));
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}


void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->InputAt(1)).is(r1));  // Function is in r1.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, eq);
}


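// instanceof against a known (global) function, with an inlined call site
// cache. The two hole-value constants emitted at the map check below act as
// placeholders that the instanceof stub later patches with the cached map
// and the cached true/false result.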
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(object.is(r0));
  ASSERT(result.is(r0));

  // A Smi is not instance of anything.
  __ JumpIfSmi(object, &false_result);

  // This is the inlined call site instanceof cache. The two occurrences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  Register map = temp;
  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch with
  // the cached map.
  __ mov(ip, Operand(factory()->the_hole_value()));
  __ cmp(map, Operand(ip));
  __ b(ne, &cache_miss);
  // We use Factory::the_hole_value() on purpose instead of loading from the
  // root array to force relocation to be able to later patch
  // with true or false.
  __ mov(result, Operand(factory()->the_hole_value()));
  __ b(&done);

  // The inlined call site cache did not match. Check null and string before
  // calling the deferred code.
  __ bind(&cache_miss);
  // Null is not instance of anything.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(object, Operand(ip));
  __ b(eq, &false_result);

  // String values are not instances of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp);
  __ b(is_string, &false_result);

  // Go to the deferred code.
  __ b(deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}


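// Deferred (cache miss) path for the instruction above. Calls the
// instanceof stub with flags that make it patch the inlined call site; the
// distance back to the map check is passed to the stub through the
// safepoint slot of r4.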
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  Register result = ToRegister(instr->result());
  ASSERT(result.is(r0));

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // Get the temp register reserved by the instruction. This needs to be r4 as
  // its slot of the pushing of safepoint registers is used to communicate the
  // offset to the location of the map check.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(temp.is(r4));
  __ mov(InstanceofStub::right(), Operand(instr->function()));
  static const int kAdditionalDelta = 4;
  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
  Label before_push_delta;
  __ bind(&before_push_delta);
  __ BlockConstPoolFor(kAdditionalDelta);
  __ mov(temp, Operand(delta * kPointerSize));
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the result register slot and
  // restore all registers.
  __ StoreToSafepointRegisterSlot(result, result);
}


static Condition ComputeCompareCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ cmp(r0, Operand(0));  // This instruction also signals no smi code inlined.

  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}


void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  // The compare stub expects compare condition and the input operands
  // reversed for GT and LTE.
  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ cmp(r0, Operand(0));
  EmitBranch(true_block, false_block, condition);
}


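// Tears down the frame and returns. The stack delta is the parameter count
// plus one for the receiver.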
void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns its parameter in r0.
    __ push(r0);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  int32_t sp_delta = (GetParameterCount() + 1) * kPointerSize;
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}


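// Loads a global directly from its property cell. If the value may be the
// hole (the global could have been deleted), deoptimize and let the generic
// code handle it.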
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
  __ ldr(result, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
  if (instr->hydrogen()->check_hole_value()) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    DeoptimizeIf(eq, instr->environment());
  }
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  __ mov(r2, Operand(instr->name()));
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Load the cell.
  __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted.
  if (instr->hydrogen()->check_hole_value()) {
    Register scratch2 = ToRegister(instr->TempAt(0));
    __ ldr(scratch2,
           FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(scratch2, ip);
    DeoptimizeIf(eq, instr->environment());
  }

  // Store the value.
  __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}


void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->global_object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}


void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(context, instr->slot_index()));
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());
  __ str(value, ContextOperand(context, instr->slot_index()));
  if (instr->needs_write_barrier()) {
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWrite(context, Operand(offset), value, scratch0());
  }
}


void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  if (instr->hydrogen()->is_in_object()) {
    __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
  } else {
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
  }
}


void LCodeGen::EmitLoadField(Register result,
                             Register object,
                             Handle<Map> type,
                             Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
  int index = lookup.GetLocalFieldIndexFromMap(*type);
  int offset = index * kPointerSize;
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    __ ldr(result, FieldMemOperand(object, offset + type->instance_size()));
  } else {
    // Non-negative property indices are in the properties array.
    __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
  }
}


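// Polymorphic named field load: compare the receiver's map against each
// expected map and load the field for the match. The last case either falls
// back to the generic LoadIC or deoptimizes, depending on whether a generic
// case is needed.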
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(r2, Operand(name));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(scratch, Operand(map));
      __ b(ne, &next);
      EmitLoadField(result, object, map, name);
      __ b(&done);
      __ bind(&next);
    }
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(scratch, Operand(map));
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ b(ne, &generic);
      EmitLoadField(result, object, map, name);
      __ b(&done);
      __ bind(&generic);
      __ mov(r2, Operand(name));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(ne, instr->environment());
      EmitLoadField(result, object, map, name);
    }
    __ bind(&done);
  }
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register scratch = scratch0();
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Check that the function really is a function. Load map into the
  // result register.
  __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
  DeoptimizeIf(ne, instr->environment());

  // Make sure that the function has an instance prototype.
  Label non_instance;
  __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  __ b(ne, &non_instance);

  // Get the prototype or initial map from the function.
  __ ldr(result,
         FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(result, ip);
  DeoptimizeIf(eq, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
  __ b(ne, &done);

  // Get the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  __ bind(&non_instance);
  __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  __ ldr(result, FieldMemOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done;
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    __ cmp(scratch, ip);
    __ b(eq, &done);
    __ ldr(scratch, FieldMemOperand(result, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ sub(scratch, scratch, Operand(FIRST_EXTERNAL_ARRAY_TYPE));
    __ cmp(scratch, Operand(kExternalArrayTypeCount));
    __ Check(cc, "Check for fast elements failed.");
    __ bind(&done);
  }
}


void LCodeGen::DoLoadExternalArrayPointer(
    LLoadExternalArrayPointer* instr) {
  Register to_reg = ToRegister(instr->result());
  Register from_reg = ToRegister(instr->InputAt(0));
  __ ldr(to_reg, FieldMemOperand(from_reg,
                                 ExternalArray::kExternalPointerOffset));
}


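// Loads an argument given the pointer to the arguments area, the arguments
// length and the index. The length minus index subtraction doubles as the
// bounds check: an out-of-range or negative index fails the unsigned
// comparison and deoptimizes.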
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Register index = ToRegister(instr->index());
  Register result = ToRegister(instr->result());

  // Bail out if the index is not a valid argument index. The unsigned
  // check gets the negative check for free.
  __ sub(length, length, index, SetCC);
  DeoptimizeIf(ls, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting the index from the length accounts for one of them; add one
  // more to skip the other.
  __ add(length, length, Operand(1));
  __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
}


void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = EmitLoadRegister(instr->key(), scratch0());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();
  ASSERT(result.is(elements));

  // Load the result.
  __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));

  // Check for the hole value.
  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ cmp(result, scratch);
  DeoptimizeIf(eq, instr->environment());
}


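// Loads an element from an external (typed) array. Float loads are widened
// to double. Unsigned int loads deoptimize when the value does not fit in a
// signed 32-bit integer, since the result would need a heap number.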
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  ExternalArrayType array_type = instr->array_type();
  if (array_type == kExternalFloatArray) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister result(ToDoubleRegister(instr->result()));
    __ add(scratch0(), external_pointer, Operand(key, LSL, 2));
    __ vldr(result.low(), scratch0(), 0);
    __ vcvt_f64_f32(result, result.low());
  } else {
    Register result(ToRegister(instr->result()));
    switch (array_type) {
      case kExternalByteArray:
        __ ldrsb(result, MemOperand(external_pointer, key));
        break;
      case kExternalUnsignedByteArray:
      case kExternalPixelArray:
        __ ldrb(result, MemOperand(external_pointer, key));
        break;
      case kExternalShortArray:
        __ ldrsh(result, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalUnsignedShortArray:
        __ ldrh(result, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalIntArray:
        __ ldr(result, MemOperand(external_pointer, key, LSL, 2));
        break;
      case kExternalUnsignedIntArray:
        __ ldr(result, MemOperand(external_pointer, key, LSL, 2));
        __ cmp(result, Operand(0x80000000));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(cs, instr->environment());
        break;
      case kExternalFloatArray:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


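// Produces the frame pointer to read arguments from: the current frame, or
// the frame below the arguments adaptor frame if the caller's arguments
// were adapted.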
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register scratch = scratch0();
  Register result = ToRegister(instr->result());

  // Check if the calling frame is an arguments adaptor frame.
  Label done, adapted;
  __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
  __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ mov(result, fp, LeaveCC, ne);
  __ mov(result, scratch, LeaveCC, eq);
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register elem = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(fp, elem);
  __ mov(result, Operand(scope()->num_parameters()));
  __ b(eq, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(result,
         MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}


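// Implements function.apply(receiver, arguments) for a known arguments
// object: null and undefined receivers are replaced by the global object,
// non-object receivers and oversized argument counts deoptimize, then the
// arguments are pushed and the function is invoked.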
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = scratch0();
  ASSERT(receiver.is(r0));  // Used for parameter count.
  ASSERT(function.is(r1));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(r0));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  Label global_object, receiver_ok;
  __ LoadRoot(scratch, Heap::kNullValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ cmp(receiver, scratch);
  __ b(eq, &global_object);

  // Deoptimize if the receiver is not a JS object.
  __ tst(receiver, Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
  __ CompareObjectType(receiver, scratch, scratch, FIRST_JS_OBJECT_TYPE);
  DeoptimizeIf(lo, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  __ ldr(receiver, GlobalObjectOperand());
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, Operand(kArgumentsLimit));
  DeoptimizeIf(hi, instr->environment());

  // Push the receiver and use the register to keep the original
  // number of arguments.
  __ push(receiver);
  __ mov(receiver, length);
  // The arguments are at a one pointer size offset from elements.
  __ add(elements, elements, Operand(1 * kPointerSize));

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ cmp(length, Operand(0));
  __ b(eq, &invoke);
  __ bind(&loop);
  __ ldr(scratch, MemOperand(elements, length, LSL, 2));
  __ push(scratch);
  __ sub(length, length, Operand(1), SetCC);
  __ b(ne, &loop);

  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // The number of arguments is stored in receiver which is r0, as expected
  // by InvokeFunction.
  v8::internal::ParameterCount actual(receiver);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->InputAt(0);
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort("DoPushArgument not implemented for double type.");
  } else {
    Register argument_reg = EmitLoadRegister(argument, ip);
    __ push(argument_reg);
  }
}


void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, cp);
}


void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result,
         MemOperand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
  __ ldr(result, FieldMemOperand(result, JSFunction::kContextOffset));
}


void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}


void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global());
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}


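// Direct call to a function that is known at compile time. The context is
// only switched when the callee's context can differ from the current one,
// and the argument count setup is skipped when no arguments adaption is
// needed.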
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr) {
  // Change context if needed.
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  }

  // Set r0 to arguments count if adaption is not needed. Assumes that r0
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(r0, Operand(arity));
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function.
  __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(ip);

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);

  // Restore context.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->function()));
  CallKnownFunction(instr->function(), instr->arity(), instr);
}


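// Deferred code for Math.abs on a tagged value that was not a smi.
// Deoptimizes unless the input is a heap number. Since heap numbers are
// immutable, a negative input is not flipped in place; a fresh heap number
// is allocated with the sign bit cleared.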
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();

  // Deoptimize if not a heap number.
  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  DeoptimizeIf(ne, instr->environment());

  Label done;
  Register exponent = scratch0();
  scratch = no_reg;
  __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| would be restored
  // unchanged by popping safepoint registers.
  __ tst(exponent, Operand(HeapNumber::kSignMask));
  __ b(eq, &done);

  // Input is negative. Reverse its sign.
  // Preserve the value of all registers.
  {
    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

    // Registers were saved at the safepoint, so we can use
    // many scratch registers.
    Register tmp1 = input.is(r1) ? r0 : r1;
    Register tmp2 = input.is(r2) ? r0 : r2;
    Register tmp3 = input.is(r3) ? r0 : r3;
    Register tmp4 = input.is(r4) ? r0 : r4;

    // exponent: floating point exponent value.

    Label allocated, slow;
    __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
    __ b(&allocated);

    // Slow case: Call the runtime system to do the number allocation.
    __ bind(&slow);

    CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
    // Set the pointer to the new heap number in tmp1.
    if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
    // Restore input_reg after call to runtime.
    __ LoadFromSafepointRegisterSlot(input, input);
    __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));

    __ bind(&allocated);
    // exponent: floating point exponent value.
    // tmp1: allocated heap number.
    __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
    __ str(exponent, FieldMemOperand(tmp1, HeapNumber::kExponentOffset));
    __ ldr(tmp2, FieldMemOperand(input, HeapNumber::kMantissaOffset));
    __ str(tmp2, FieldMemOperand(tmp1, HeapNumber::kMantissaOffset));

    __ StoreToSafepointRegisterSlot(tmp1, input);
  }

  __ bind(&done);
}


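// Computes the absolute value of an integer input in place. The only
// input that can overflow is kMinInt, which has no positive 32-bit
// counterpart; that case sets the V flag and deoptimizes.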
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ cmp(input, Operand(0));
  // We can make rsb conditional because the previous cmp instruction
  // will clear the V (overflow) flag and rsb won't set this flag
  // if input is positive.
  __ rsb(input, input, Operand(0), SetCC, mi);
  // Deoptimize on overflow.
  DeoptimizeIf(vs, instr->environment());
}


void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsDouble()) {
    DwVfpRegister input = ToDoubleRegister(instr->InputAt(0));
    __ vabs(input, input);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {
    // Representation is tagged.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input = ToRegister(instr->InputAt(0));
    // Smi check.
    __ JumpIfNotSmi(input, deferred->entry());
    // If smi, handle it directly.
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}


void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  SwVfpRegister single_scratch = double_scratch0().low();
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));

  __ EmitVFPTruncate(kRoundToMinusInf,
                     single_scratch,
                     input,
                     scratch1,
                     scratch2);
  DeoptimizeIf(ne, instr->environment());

  // Move the result back into the general purpose result register.
  __ vmov(result, single_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    Label done;
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
    __ bind(&done);
  }
}


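// Rounds a double to the nearest integer by adding 0.5 and truncating
// towards minus infinity. The exponent bits are inspected first so that
// tiny inputs produce +/-0 directly and inputs too large for a 32-bit
// integer deoptimize before the conversion is attempted.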
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch1 = result;
  Register scratch2 = scratch0();
  Label done, check_sign_on_zero;

  // Extract exponent bits.
  __ vmov(scratch1, input.high());
  __ ubfx(scratch2,
          scratch1,
          HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);

  // If the number is in ]-0.5, +0.5[, the result is +/- 0.
  __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
  __ mov(result, Operand(0), LeaveCC, le);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ b(le, &check_sign_on_zero);
  } else {
    __ b(le, &done);
  }

  // The following conversion will not work with numbers
  // outside of ]-2^32, 2^32[.
  __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
  DeoptimizeIf(ge, instr->environment());

  // Save the original sign for later comparison.
  __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));

  __ vmov(double_scratch0(), 0.5);
  __ vadd(input, input, double_scratch0());

  // Check sign of the result: if the sign changed, the input
  // value was in [-0.5, 0[ and the result should be -0.
  __ vmov(scratch1, input.high());
  __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(mi, instr->environment());
  } else {
    __ mov(result, Operand(0), LeaveCC, mi);
    __ b(mi, &done);
  }

  __ EmitVFPTruncate(kRoundToMinusInf,
                     double_scratch0().low(),
                     input,
                     scratch1,
                     scratch2);
  DeoptimizeIf(ne, instr->environment());
  __ vmov(result, double_scratch0().low());

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Test for -0.
    __ cmp(result, Operand(0));
    __ b(ne, &done);
    __ bind(&check_sign_on_zero);
    __ vmov(scratch1, input.high());
    __ tst(scratch1, Operand(HeapNumber::kSignMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ bind(&done);
}


void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input));
  __ vsqrt(input, input);
}


void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  SwVfpRegister single_scratch = double_scratch0().low();
  DoubleRegister double_scratch = double_scratch0();
  ASSERT(ToDoubleRegister(instr->result()).is(input));

  // Add +0 to convert -0 to +0.
  __ mov(scratch, Operand(0));
  __ vmov(single_scratch, scratch);
  __ vcvt_f64_s32(double_scratch, single_scratch);
  __ vadd(input, input, double_scratch);
  __ vsqrt(input, input);
}


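// Math.pow. Double arguments are passed to the C helper in the core
// register pairs r0/r1 and r2/r3 (soft-float calling convention); a
// tagged exponent is unboxed to a double inline before the call.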
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  Register scratch = scratch0();
  DoubleRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();
  if (exponent_type.IsDouble()) {
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(4, scratch);
    __ vmov(r0, r1, ToDoubleRegister(left));
    __ vmov(r2, r3, ToDoubleRegister(right));
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 4);
  } else if (exponent_type.IsInteger32()) {
    ASSERT(ToRegister(right).is(r0));
    // Prepare arguments and call C function.
    __ PrepareCallCFunction(4, scratch);
    __ mov(r2, ToRegister(right));
    __ vmov(r0, r1, ToDoubleRegister(left));
    __ CallCFunction(
        ExternalReference::power_double_int_function(isolate()), 4);
  } else {
    ASSERT(exponent_type.IsTagged());
    ASSERT(instr->hydrogen()->left()->representation().IsDouble());

    Register right_reg = ToRegister(right);

    // Check for smi on the right hand side.
    Label non_smi, call;
    __ JumpIfNotSmi(right_reg, &non_smi);

    // Untag smi and convert it to a double.
    __ SmiUntag(right_reg);
    SwVfpRegister single_scratch = double_scratch0().low();
    __ vmov(single_scratch, right_reg);
    __ vcvt_f64_s32(result_reg, single_scratch);
    __ jmp(&call);

    // Heap number map check.
    __ bind(&non_smi);
    __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(scratch, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
    __ add(scratch, right_reg, Operand(value_offset));
    __ vldr(result_reg, scratch, 0);

    // Prepare arguments and call C function.
    __ bind(&call);
    __ PrepareCallCFunction(4, scratch);
    __ vmov(r0, r1, ToDoubleRegister(left));
    __ vmov(r2, r3, result_reg);
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 4);
  }
  // Store the result in the result register.
  __ GetCFunctionDoubleResult(result_reg);
}


void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(d2));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;
    default:
      Abort("Unimplemented type of LUnaryMathOperation.");
      UNREACHABLE();
  }
}


void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  ASSERT(ToRegister(instr->function()).is(r1));
  ASSERT(instr->HasPointerMap());
  ASSERT(instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator generator(this, pointers, env->deoptimization_index());
  ParameterCount count(instr->arity());
  __ InvokeFunction(r1, count, CALL_FUNCTION, &generator);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
      arity, NOT_IN_LOOP);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ Drop(1);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP);
  __ mov(r2, Operand(instr->name()));
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}


void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  __ mov(r1, Operand(instr->target()));
  CallKnownFunction(instr->target(), instr->arity(), instr);
}


void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r1));
  ASSERT(ToRegister(instr->result()).is(r0));

  Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
  __ mov(r0, Operand(instr->arity()));
  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
}


void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}


void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  Register scratch = scratch0();
  int offset = instr->offset();

  ASSERT(!object.is(value));

  if (!instr->transition().is_null()) {
    __ mov(scratch, Operand(instr->transition()));
    __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ str(value, FieldMemOperand(object, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, Operand(offset), value, scratch);
    }
  } else {
    __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
    __ str(value, FieldMemOperand(scratch, offset));
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(scratch, Operand(offset), value, object);
    }
  }
}


void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
  DeoptimizeIf(hs, instr->environment());
}


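// Stores into a fast-elements backing store. When a write barrier is
// needed, the key register is clobbered to hold the address of the
// modified element.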
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}


void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {

  Register external_pointer = ToRegister(instr->external_pointer());
  Register key = ToRegister(instr->key());
  ExternalArrayType array_type = instr->array_type();
  if (array_type == kExternalFloatArray) {
    CpuFeatures::Scope scope(VFP3);
    DwVfpRegister value(ToDoubleRegister(instr->value()));
    __ add(scratch0(), external_pointer, Operand(key, LSL, 2));
    __ vcvt_f32_f64(double_scratch0().low(), value);
    __ vstr(double_scratch0().low(), scratch0(), 0);
  } else {
    Register value(ToRegister(instr->value()));
    switch (array_type) {
      case kExternalPixelArray:
        // Clamp the value to [0..255].
        __ Usat(value, 8, Operand(value));
        __ strb(value, MemOperand(external_pointer, key));
        break;
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ strb(value, MemOperand(external_pointer, key));
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ strh(value, MemOperand(external_pointer, key, LSL, 1));
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray:
        __ str(value, MemOperand(external_pointer, key, LSL, 2));
        break;
      case kExternalFloatArray:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->object()).is(r2));
  ASSERT(ToRegister(instr->key()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStringAdd(LStringAdd* instr) {
  __ push(ToRegister(instr->left()));
  __ push(ToRegister(instr->right()));
  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


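// Loads the character code at the given index out of a string. Flat
// sequential strings are handled inline; a cons string whose second
// component is the empty string is unwrapped first; all other cases go
// to the deferred runtime path.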
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register scratch = scratch0();
  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  Label flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(eq, &flat_string);

  // Handle non-flat strings.
  __ tst(result, Operand(kIsConsStringMask));
  __ b(eq, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
  __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
  __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ tst(result, Operand(kStringRepresentationMask));
  __ b(ne, deferred->entry());

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ tst(result, Operand(kStringEncodingMask));
  __ b(ne, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ ldrh(result,
            FieldMemOperand(string,
                            SeqTwoByteString::kHeaderSize + 2 * const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
    __ ldrh(result, MemOperand(scratch, index, LSL, 1));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ ldrb(result, FieldMemOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ add(scratch,
           string,
           Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
    __ ldrb(result, MemOperand(scratch, index));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());
  Register scratch = scratch0();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ mov(scratch, Operand(Smi::FromInt(const_index)));
    __ push(scratch);
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(r0);
  }
  __ SmiUntag(r0);
  __ StoreToSafepointRegisterSlot(r0, result);
}


void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  __ cmp(char_code, Operand(String::kMaxAsciiCharCode));
  __ b(hi, deferred->entry());
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ add(result, result, Operand(char_code, LSL, kPointerSizeLog2));
  __ ldr(result, FieldMemOperand(result, FixedArray::kHeaderSize));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(result, ip);
  __ b(eq, deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ mov(result, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(r0, result);
}


void LCodeGen::DoStringLength(LStringLength* instr) {
  Register string = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(string, String::kLengthOffset));
}


void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  ASSERT(output->IsDoubleRegister());
  SwVfpRegister single_scratch = double_scratch0().low();
  if (input->IsStackSlot()) {
    Register scratch = scratch0();
    __ ldr(scratch, ToMemOperand(input));
    __ vmov(single_scratch, scratch);
  } else {
    __ vmov(single_scratch, ToRegister(input));
  }
  __ vcvt_f64_s32(ToDoubleRegister(output), single_scratch);
}


void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  __ SmiTag(reg, SetCC);
  __ b(vs, deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  DoubleRegister dbl_scratch = d0;
  SwVfpRegister flt_scratch = s0;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  __ SmiUntag(reg);
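  // Untagging shifted the original sign bit out; because bits 30 and 31
  // of the original value differ, flipping bit 31 recovers the integer.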
  __ eor(reg, reg, Operand(0x80000000));
  __ vmov(flt_scratch, reg);
  __ vcvt_f64_s32(dbl_scratch, flt_scratch);
  if (FLAG_inline_new) {
    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
    if (!reg.is(r5)) __ mov(reg, r5);
    __ b(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the
  // result register is stored, as this register is in the pointer map, but
  // contains an integer value.
  __ mov(ip, Operand(0));
  __ StoreToSafepointRegisterSlot(ip, reg);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  if (!reg.is(r0)) __ mov(reg, r0);

  // Done. Put the value in dbl_scratch into the value of the allocated heap
  // number.
  __ bind(&done);
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
  __ StoreToSafepointRegisterSlot(reg, reg);
}


void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  DoubleRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register scratch = scratch0();
  Register reg = ToRegister(instr->result());
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  __ sub(ip, reg, Operand(kHeapObjectTag));
  __ vstr(input_reg, ip, HeapNumber::kValueOffset);
}


void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ mov(reg, Operand(0));

  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  __ StoreToSafepointRegisterSlot(r0, reg);
}


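// Smi tagging on 32-bit ARM is a left shift by one bit. No overflow
// check is emitted here because the hydrogen value is known not to
// overflow (see the ASSERT below).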
void LCodeGen::DoSmiTag(LSmiTag* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ SmiTag(ToRegister(input));
}


void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  if (instr->needs_check()) {
    __ tst(ToRegister(input), Operand(kSmiTagMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ SmiUntag(ToRegister(input));
}


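// Converts a tagged value to a double: smis are untagged and converted
// inline, heap numbers are loaded directly, undefined becomes NaN, and
// anything else deoptimizes.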
void LCodeGen::EmitNumberUntagD(Register input_reg,
                                DoubleRegister result_reg,
                                LEnvironment* env) {
  Register scratch = scratch0();
  SwVfpRegister flt_scratch = s0;
  ASSERT(!result_reg.is(d0));

  Label load_smi, heap_number, done;

  // Smi check.
  __ tst(input_reg, Operand(kSmiTagMask));
  __ b(eq, &load_smi);

  // Heap number map check.
  __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch, Operand(ip));
  __ b(eq, &heap_number);

  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(input_reg, Operand(ip));
  DeoptimizeIf(ne, env);

  // Convert undefined to NaN.
  __ LoadRoot(ip, Heap::kNanValueRootIndex);
  __ sub(ip, ip, Operand(kHeapObjectTag));
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
  __ jmp(&done);

  // Heap number to double register conversion.
  __ bind(&heap_number);
  __ sub(ip, input_reg, Operand(kHeapObjectTag));
  __ vldr(result_reg, ip, HeapNumber::kValueOffset);
  __ jmp(&done);

  // Smi to double register conversion.
  __ bind(&load_smi);
  __ SmiUntag(input_reg);  // Untag smi before converting to float.
  __ vmov(flt_scratch, input_reg);
  __ vcvt_f64_s32(result_reg, flt_scratch);
  __ SmiTag(input_reg);  // Retag smi.
  __ bind(&done);
}


class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};


void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_scratch = double_scratch0();
  SwVfpRegister single_scratch = double_scratch.low();

  ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2));
  ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1));

  Label done;

  // Heap number map check.
  __ ldr(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
  __ cmp(scratch1, Operand(ip));

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    DwVfpRegister double_scratch2 = ToDoubleRegister(instr->TempAt(2));
    ASSERT(!scratch3.is(input_reg) &&
           !scratch3.is(scratch1) &&
           !scratch3.is(scratch2));
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    Label heap_number;
    __ b(eq, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input_reg, Operand(ip));
    DeoptimizeIf(ne, instr->environment());
    __ mov(input_reg, Operand(0));
    __ b(&done);

    __ bind(&heap_number);
    __ sub(scratch1, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch2, scratch1, HeapNumber::kValueOffset);

    __ EmitECMATruncate(input_reg,
                        double_scratch2,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);

  } else {
    CpuFeatures::Scope scope(VFP3);
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(ne, instr->environment());

    __ sub(ip, input_reg, Operand(kHeapObjectTag));
    __ vldr(double_scratch, ip, HeapNumber::kValueOffset);
    __ EmitVFPTruncate(kRoundToZero,
                       single_scratch,
                       double_scratch,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    DeoptimizeIf(ne, instr->environment());
    // Load the result.
    __ vmov(input_reg, single_scratch);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ cmp(input_reg, Operand(0));
      __ b(ne, &done);
      __ vmov(scratch1, double_scratch.high());
      __ tst(scratch1, Operand(HeapNumber::kSignMask));
      DeoptimizeIf(ne, instr->environment());
    }
  }
  __ bind(&done);
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ tst(input_reg, Operand(kSmiTagMask));
  __ b(ne, deferred->entry());

  // Smi to int32 conversion.
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  DoubleRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg, instr->environment());
}


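// Converts a double to an integer. In the truncating case the result is
// computed modulo 2^32, as required by the JS bitwise operations; in the
// non-truncating case any inexact or out-of-range conversion deoptimizes.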
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  Register result_reg = ToRegister(instr->result());
  Register scratch1 = scratch0();
  Register scratch2 = ToRegister(instr->TempAt(0));
  DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
  DwVfpRegister double_scratch = double_scratch0();
  SwVfpRegister single_scratch = double_scratch0().low();

  Label done;

  if (instr->truncating()) {
    Register scratch3 = ToRegister(instr->TempAt(1));
    __ EmitECMATruncate(result_reg,
                        double_input,
                        single_scratch,
                        scratch1,
                        scratch2,
                        scratch3);
  } else {
    VFPRoundingMode rounding_mode = kRoundToMinusInf;
    __ EmitVFPTruncate(rounding_mode,
                       single_scratch,
                       double_input,
                       scratch1,
                       scratch2,
                       kCheckForInexactConversion);
    // Deoptimize if we had a vfp invalid exception,
    // including inexact operation.
    DeoptimizeIf(ne, instr->environment());
    // Retrieve the result.
    __ vmov(result_reg, single_scratch);
  }
  __ bind(&done);
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(eq, instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register scratch = scratch0();
  InstanceType first = instr->hydrogen()->first();
  InstanceType last = instr->hydrogen()->last();

  __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(first));

  // If there is only one type in the interval, check for equality.
  if (first == last) {
    DeoptimizeIf(ne, instr->environment());
  } else {
    DeoptimizeIf(lo, instr->environment());
    // Omit check for the last type.
    if (last != LAST_TYPE) {
      __ cmp(scratch, Operand(last));
      DeoptimizeIf(hi, instr->environment());
    }
  }
}


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->InputAt(0)->IsRegister());
  Register reg = ToRegister(instr->InputAt(0));
  __ cmp(reg, Operand(instr->hydrogen()->target()));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}


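// Loads a heap object, taking care of objects in new space: those may be
// moved by the garbage collector, so they are loaded indirectly through a
// global property cell instead of being embedded in the code directly.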
void LCodeGen::LoadHeapObject(Register result,
                              Handle<HeapObject> object) {
  if (heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand(cell));
    __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(result, Operand(object));
  }
}


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}


void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(r0));
  __ push(r0);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ mov(r1, Operand(shared_info));
    __ push(r1);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ mov(r2, Operand(shared_info));
    __ mov(r1, Operand(pretenure
                       ? factory()->true_value()
                       : factory()->false_value()));
    __ Push(cp, r2, r1);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  Register input = ToRegister(instr->InputAt(0));
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  Condition final_branch_condition = EmitTypeofIs(&true_label,
                                                  &false_label,
                                                  input,
                                                  instr->type_literal());
  __ b(final_branch_condition, &true_label);
  __ bind(&false_label);
  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}


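// Emits the code for a 'typeof input == type_name' test and returns the
// condition on which the caller should branch to true_label; the emitted
// code itself may also branch directly to either label.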
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = kNoCondition;
  Register scratch = scratch0();
  if (type_name->Equals(heap()->number_symbol())) {
    __ JumpIfSmi(input, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_NONSTRING_TYPE);
    __ b(ge, false_label);
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ b(eq, true_label);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = eq;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ b(eq, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(heap()->function_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareObjectType(input, input, scratch, FIRST_FUNCTION_CLASS_TYPE);
    final_branch_condition = ge;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ b(eq, true_label);
    __ CompareObjectType(input, input, scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, scratch, FIRST_FUNCTION_CLASS_TYPE);
    __ b(hs, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = eq;

4151 } else {
4152 final_branch_condition = ne;
4153 __ b(false_label);
4154 // A dead branch instruction will be generated after this point.
4155 }
4156
4157 return final_branch_condition;
4158}
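
// For reference, a hedged sketch of the JavaScript-visible mapping the
// cases above implement (see the individual branches for the exact checks):
//
//   'number'    -> smis and heap numbers
//   'string'    -> string instance types that are not undetectable
//   'boolean'   -> exactly the true and false values
//   'undefined' -> the undefined value and undetectable objects
//   'function'  -> instance types >= FIRST_FUNCTION_CLASS_TYPE
//   'object'    -> null and detectable JS objects below the function range
//
// Any other type literal can never match, so the final else branch jumps
// unconditionally to false_label and the returned condition is never
// reached.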


void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
  // Materialize true or false from the flags set by EmitIsConstructCall.
  Register result = ToRegister(instr->result());
  Label true_label;
  Label false_label;
  Label done;

  EmitIsConstructCall(result, scratch0());
  __ b(eq, &true_label);

  __ LoadRoot(result, Heap::kFalseValueRootIndex);
  __ b(&done);

  __ bind(&true_label);
  __ LoadRoot(result, Heap::kTrueValueRootIndex);

  __ bind(&done);
}


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp1 = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp1, scratch0());
  EmitBranch(true_block, false_block, eq);
}


void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
  ASSERT(!temp1.is(temp2));
  // Get the frame pointer for the calling frame.
  __ ldr(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ ldr(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
  __ cmp(temp2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ ldr(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
  __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
}
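
// Illustrative note: for a function F, "new F()" leaves a calling frame
// whose marker is StackFrame::CONSTRUCT, while a plain call "F()" does not,
// so the eq condition set above distinguishes the two at run time. An
// arguments adaptor frame (inserted when the actual and formal argument
// counts differ) is transparent here and is skipped before the marker is
// read.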


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture the environment
  // after a call for populating the safepoint data with deoptimization data.
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(al, instr->environment());
}


void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  Register strict = scratch0();
  __ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
  __ Push(object, key, strict);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  __ InvokeBuiltin(Builtins::DELETE, CALL_JS, &safepoint_generator);
}
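
// Illustrative note on the strict flag pushed above: under ES5 semantics,
// "delete o.p" on a non-configurable property returns false in sloppy mode
// but throws a TypeError in strict mode, so the builtin receives the
// caller's strict mode flag as an explicit third argument. The safepoint
// generator records the pointer map and deoptimization index so that a
// lazy deoptimization after the builtin call can reconstruct the
// environment.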


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}
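
// Illustrative sketch (comment only): the fast path above is the usual
// limit-compare pattern, roughly equivalent to
//
//   if (sp < stack_limit) {  // unsigned compare; b(hs) skips the call
//     StackCheckStub();      // interruption / overflow handling
//   }
//
// keeping the common case down to a load, a compare and a not-taken
// branch.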


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}


#undef __

} } // namespace v8::internal