// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "ia32/lithium-codegen-ia32.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


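// PostCallGenerator hook used for calls emitted through the macro assembler:
// once the call instruction has been generated it records a safepoint with
// the given pointer map and deoptimization index.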
class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope(SSE2);
  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_start(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


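// Builds the fixed part of the optimized frame (saved ebp, context and JS
// function) and reserves the spill slots requested by the register allocator.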
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(Operand(eax), Immediate(slots));
      Label loop;
      __ bind(&loop);
      __ push(Immediate(kSlotsZapValue));
      __ dec(eax);
      __ j(not_zero, &loop);
    } else {
      __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ mov(Operand(esp, offset), eax);
      }
#endif
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
         value->Number());
  return static_cast<int32_t>(value->Number());
}


Immediate LCodeGen::ToImmediate(LOperand* op) {
  LConstantOperand* const_op = LConstantOperand::cast(op);
  Handle<Object> literal = chunk_->LookupLiteral(const_op);
  Representation r = chunk_->LookupLiteralRepresentation(const_op);
  if (r.IsInteger32()) {
    ASSERT(literal->IsNumber());
    return Immediate(static_cast<int32_t>(literal->Number()));
  } else if (r.IsDouble()) {
    Abort("unsupported double immediate");
  }
  ASSERT(r.IsTagged());
  return Immediate(literal);
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(ebp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(ebp, -(index - 1) * kPointerSize);
  }
}


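// Like ToOperand, but restricted to double spill slots; callers use the
// returned frame operand to address the slot word-wise.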
Operand LCodeGen::HighOperand(LOperand* op) {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  int offset = (index >= 0) ? index + 3 : index - 1;
  return Operand(ebp, -offset * kPointerSize);
}


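// Emits the deoptimization translation for an environment. Outer (calling)
// environments are written first via the recursive call, so the innermost
// frame ends up last in the translation.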
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  if (instr != NULL) {
    LPointerMap* pointers = instr->pointer_map();
    RecordPosition(pointers->position());
    __ call(code, mode);
    RegisterLazyDeoptimization(instr);
  } else {
    LPointerMap no_pointers(0);
    RecordPosition(no_pointers.position());
    __ call(code, mode);
    RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
  }

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  ASSERT(pointers != NULL);
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  // Runtime calls to Throw are not supposed to ever return at the
  // call site, so don't register lazy deoptimization for these. We do
  // however have to record a safepoint since throwing exceptions can
  // cause garbage collections.
  // BUG(3243555): register a lazy deoptimization point at throw. We need
  // it to be able to inline functions containing a throw statement.
  if (!instr->IsThrow()) {
    RegisterLazyDeoptimization(instr);
  } else {
    RecordSafepoint(instr->pointer_map(), Safepoint::kNoDeoptimizationIndex);
  }
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bailout to. If the call has side effects
  // execution has to continue after the call otherwise execution can continue
  // from a previous bailout point repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

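  // With --deopt-every-n-times the shared function info carries a counter
  // that is decremented at every potential deopt site; when it reaches zero
  // the counter is reset and an unconditional deoptimization is forced.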
  if (FLAG_deopt_every_n_times != 0) {
    Handle<SharedFunctionInfo> shared(info_->shared_info());
    Label no_deopt;
    __ pushfd();
    __ push(eax);
    __ push(ebx);
    __ mov(ebx, shared);
    __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
    __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    __ j(not_zero, &no_deopt);
    if (FLAG_trap_on_deopt) __ int3();
    __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);

    __ bind(&no_deopt);
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ int3();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      NearLabel done;
      __ j(NegateCondition(cc), &done);
      __ int3();
      __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
                                                    deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    }
  }
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint =
      safepoints_.DefineSafepointWithRegisters(
          masm(), arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister()) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  // Register esi always contains a pointer to the context.
  safepoint.DefinePointerRegister(esi);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(eax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      StringCharAtStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::MathPow: {
      MathPowStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


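// Integer modulo via idiv: the dividend is fixed in eax, cdq sign-extends it
// into edx, and edx receives the remainder.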
void LCodeGen::DoModI(LModI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(edx));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register right_reg = ToRegister(right);

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Sign extend to edx.
  __ cdq();

  // Check for (0 % -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel positive_left;
    NearLabel done;
    __ test(eax, Operand(eax));
    __ j(not_sign, &positive_left);
    __ idiv(right_reg);

    // Test the remainder for 0, because then the result would be -0.
    __ test(edx, Operand(edx));
    __ j(not_zero, &done);

    DeoptimizeIf(no_condition, instr->environment());
    __ bind(&positive_left);
    __ idiv(right_reg);
    __ bind(&done);
  } else {
    __ idiv(right_reg);
  }
}


void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(eax));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register left_reg = eax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel left_not_zero;
    __ test(left_reg, Operand(left_reg));
    __ j(not_zero, &left_not_zero);
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    NearLabel left_not_min_int;
    __ cmp(left_reg, kMinInt);
    __ j(not_zero, &left_not_min_int);
    __ cmp(right_reg, -1);
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to edx.
  __ cdq();
  __ idiv(right_reg);

  // Deoptimize if remainder is not 0.
  __ test(edx, Operand(edx));
  DeoptimizeIf(not_zero, instr->environment());
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ mov(ToRegister(instr->TempAt(0)), left);
  }

  if (right->IsConstantOperand()) {
    __ imul(left, left, ToInteger32(LConstantOperand::cast(right)));
  } else {
    __ imul(left, ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    NearLabel done;
    __ test(left, Operand(left));
    __ j(not_zero, &done);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) < 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), right_operand);
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), right_operand);
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), right_operand);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(ecx));

    switch (instr->op()) {
      case Token::SAR:
        __ sar_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shr_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        }
        break;
      case Token::SHL:
        __ shl_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sar(ToRegister(left), shift_count);
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        } else {
          __ shr(ToRegister(left), shift_count);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shl(ToRegister(left), shift_count);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ sub(ToOperand(left), ToImmediate(right));
  } else {
    __ sub(ToRegister(left), ToOperand(right));
  }
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  // Use xor to produce +0.0 in a fast and compact way, but avoid to
  // do so if the constant is -0.0.
  if (BitCast<uint64_t, double>(v) == 0) {
    __ xorpd(res, res);
  } else {
    int32_t v_int32 = static_cast<int32_t>(v);
    if (static_cast<double>(v_int32) == v) {
      __ push_imm32(v_int32);
      __ cvtsi2sd(res, Operand(esp, 0));
      __ add(Operand(esp), Immediate(kPointerSize));
    } else {
      uint64_t int_val = BitCast<uint64_t, double>(v);
      int32_t lower = static_cast<int32_t>(int_val);
      int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
      __ push_imm32(upper);
      __ push_imm32(lower);
      __ movdbl(res, Operand(esp, 0));
      __ add(Operand(esp), Immediate(2 * kPointerSize));
    }
  }
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  NearLabel done;
  // If the object is a smi return the object.
  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, &done);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
  __ j(not_equal, &done);
  __ mov(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToOperand(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ add(ToOperand(left), ToImmediate(right));
  } else {
    __ add(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  // Modulo uses a fixed result register.
  ASSERT(instr->op() == Token::MOD || left->Equals(instr->result()));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::SUB:
      __ subsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MUL:
      __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::DIV:
      __ divsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MOD: {
      // Pass two doubles as arguments on the stack.
      __ PrepareCallCFunction(4, eax);
      __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
      __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
      __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);

      // Return value is in st(0) on ia32.
      // Store it into the (fixed) result register.
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ fstp_d(Operand(esp, 0));
      __ movdbl(ToDoubleRegister(instr->result()), Operand(esp, 0));
      __ add(Operand(esp), Immediate(kDoubleSize));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(edx));
  ASSERT(ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


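// Returns the index of the next block that actually emits code, skipping
// blocks whose labels have been replaced, or -1 if there is none.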
int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


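// Emits the branch to the two target blocks, falling through when one of
// them is the next block to be emitted and collapsing to a single jump when
// both targets are the same block.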
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ jmp(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ test(reg, Operand(reg));
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ cmp(reg, Factory::true_value());
      EmitBranch(true_block, false_block, equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ cmp(reg, Factory::undefined_value());
      __ j(equal, false_label);
      __ cmp(reg, Factory::true_value());
      __ j(equal, true_label);
      __ cmp(reg, Factory::false_value());
      __ j(equal, false_label);
      __ test(reg, Operand(reg));
      __ j(equal, false_label);
      __ test(reg, Immediate(kSmiTagMask));
      __ j(zero, true_label);

      // Test for double values. Zero is false.
      NearLabel call_stub;
      __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Factory::heap_number_map());
      __ j(not_equal, &call_stub);
      __ fldz();
      __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
      __ FCmp();
      __ j(zero, false_label);
      __ jmp(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      __ pushad();
      __ push(reg);
      __ CallStub(&stub);
      __ test(eax, Operand(eax));
      __ popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit();
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


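// Deferred code for stack-checked gotos: calls Runtime::kStackGuard with all
// registers saved and records a safepoint for the call.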
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  __ pushad();
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  __ popad();
}

void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = equal;
      break;
    case Token::LT:
      cond = is_unsigned ? below : less;
      break;
    case Token::GT:
      cond = is_unsigned ? above : greater;
      break;
    case Token::LTE:
      cond = is_unsigned ? below_equal : less_equal;
      break;
    case Token::GTE:
      cond = is_unsigned ? above_equal : greater_equal;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  if (right->IsConstantOperand()) {
    __ cmp(ToOperand(left), ToImmediate(right));
  } else {
    __ cmp(ToRegister(left), ToOperand(right));
  }
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();

  NearLabel unordered;
  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the unordered case, which produces a false value.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, &unordered, not_taken);
  } else {
    EmitCmpI(left, right);
  }

  NearLabel done;
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ mov(ToRegister(result), Factory::true_value());
  __ j(cc, &done);

  __ bind(&unordered);
  __ mov(ToRegister(result), Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the false block.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ mov(result, Factory::true_value());
  NearLabel done;
  __ j(equal, &done);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Materialize false.

  __ cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    __ mov(result, Factory::true_value());
    NearLabel done;
    __ j(equal, &done);
    __ mov(result, Factory::false_value());
    __ bind(&done);
  } else {
    NearLabel true_value, false_value, done;
    __ j(equal, &true_value);
    __ cmp(reg, Factory::undefined_value());
    __ j(equal, &true_value);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &true_value);
    __ bind(&false_value);
    __ mov(result, Factory::false_value());
    __ jmp(&done);
    __ bind(&true_value);
    __ mov(result, Factory::true_value());
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, equal);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ j(equal, true_label);
    __ cmp(reg, Factory::undefined_value());
    __ j(equal, true_label);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->TempAt(0));
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, not_zero);
  }
}


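// Tests whether input is a non-undetectable JS object (null counts as an
// object here). Branches to is_not_object / is_object for the definite cases
// and returns the condition under which the fall-through case is an object.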
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  ASSERT(!input.is(temp1));
  ASSERT(!input.is(temp2));
  ASSERT(!temp1.is(temp2));

  __ test(input, Immediate(kSmiTagMask));
  __ j(equal, is_not_object);

  __ cmp(input, Factory::null_value());
  __ j(equal, is_object);

  __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
  __ test(temp2, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, is_not_object);

  __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
  __ j(below, is_not_object);
  __ cmp(temp2, LAST_JS_OBJECT_TYPE);
  return below_equal;
}


void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = ToRegister(instr->TempAt(0));
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ j(true_cond, &is_true);

  __ bind(&is_false);
  __ mov(result, Factory::false_value());
  __ jmp(&done);

  __ bind(&is_true);
  __ mov(result, Factory::true_value());

  __ bind(&done);
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  Operand input = ToOperand(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ test(input, Immediate(kSmiTagMask));
  __ mov(result, Factory::true_value());
  NearLabel done;
  __ j(zero, &done);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  Operand input = ToOperand(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ test(input, Immediate(kSmiTagMask));
  EmitBranch(true_block, false_block, zero);
}


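// Helpers for LHasInstanceType: pick the instance type to compare against and
// the condition that makes the [from, to] range check succeed.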
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001501static InstanceType TestType(HHasInstanceType* instr) {
1502 InstanceType from = instr->from();
1503 InstanceType to = instr->to();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001504 if (from == FIRST_TYPE) return to;
1505 ASSERT(from == to || to == LAST_TYPE);
1506 return from;
1507}
1508
1509
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001510static Condition BranchCondition(HHasInstanceType* instr) {
1511 InstanceType from = instr->from();
1512 InstanceType to = instr->to();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001513 if (from == to) return equal;
1514 if (to == LAST_TYPE) return above_equal;
1515 if (from == FIRST_TYPE) return below_equal;
1516 UNREACHABLE();
1517 return equal;
1518}
1519
1520
1521void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001522 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001523 Register result = ToRegister(instr->result());
1524
1525 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1526 __ test(input, Immediate(kSmiTagMask));
1527 NearLabel done, is_false;
1528 __ j(zero, &is_false);
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001529 __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1530 __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001531 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001532 __ jmp(&done);
1533 __ bind(&is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001534 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001535 __ bind(&done);
1536}
1537
1538
1539void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001540 Register input = ToRegister(instr->InputAt(0));
1541 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001542
1543 int true_block = chunk_->LookupDestination(instr->true_block_id());
1544 int false_block = chunk_->LookupDestination(instr->false_block_id());
1545
1546 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1547
1548 __ test(input, Immediate(kSmiTagMask));
1549 __ j(zero, false_label);
1550
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001551 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1552 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001553}
1554
1555
1556void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001557 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001558 Register result = ToRegister(instr->result());
1559
1560 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001561 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001562 __ test(FieldOperand(input, String::kHashFieldOffset),
1563 Immediate(String::kContainsCachedArrayIndexMask));
1564 NearLabel done;
1565 __ j(not_zero, &done);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001566 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001567 __ bind(&done);
1568}
1569
1570
1571void LCodeGen::DoHasCachedArrayIndexAndBranch(
1572 LHasCachedArrayIndexAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001573 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001574
1575 int true_block = chunk_->LookupDestination(instr->true_block_id());
1576 int false_block = chunk_->LookupDestination(instr->false_block_id());
1577
1578 __ test(FieldOperand(input, String::kHashFieldOffset),
1579 Immediate(String::kContainsCachedArrayIndexMask));
1580 EmitBranch(true_block, false_block, not_equal);
1581}
1582
1583
1584// Branches to a label or falls through with the answer in the z flag. Trashes
1585// the temp registers, but not the input. Only input and temp2 may alias.
1586void LCodeGen::EmitClassOfTest(Label* is_true,
1587 Label* is_false,
1588 Handle<String> class_name,
1589 Register input,
1590 Register temp,
1591 Register temp2) {
1592 ASSERT(!input.is(temp));
1593 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1594 __ test(input, Immediate(kSmiTagMask));
1595 __ j(zero, is_false);
1596 __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
1597 __ j(below, is_false);
1598
1599 // Map is now in temp.
1600 // Functions have class 'Function'.
1601 __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
1602 if (class_name->IsEqualTo(CStrVector("Function"))) {
1603 __ j(equal, is_true);
1604 } else {
1605 __ j(equal, is_false);
1606 }
1607
1608 // Check if the constructor in the map is a function.
1609 __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
1610
1611 // As long as JS_FUNCTION_TYPE is the last instance type and it is
1612 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1613 // LAST_JS_OBJECT_TYPE.
1614 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1615 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1616
1617 // Objects with a non-function constructor have class 'Object'.
1618 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1619 if (class_name->IsEqualTo(CStrVector("Object"))) {
1620 __ j(not_equal, is_true);
1621 } else {
1622 __ j(not_equal, is_false);
1623 }
1624
1625 // temp now contains the constructor function. Grab the
1626 // instance class name from there.
1627 __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1628 __ mov(temp, FieldOperand(temp,
1629 SharedFunctionInfo::kInstanceClassNameOffset));
1630 // The class name we are testing against is a symbol because it's a literal.
1631 // The name in the constructor is a symbol because of the way the context is
1632 // booted. This routine isn't expected to work for random API-created
1633 // classes and it doesn't have to because you can't access it with natives
1634 // syntax. Since both sides are symbols it is sufficient to use an identity
1635 // comparison.
1636 __ cmp(temp, class_name);
1637 // End with the answer in the z flag.
1638}
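// EmitClassOfTest is shared by DoClassOfTest and DoClassOfTestAndBranch below;
// both rely on the final cmp above, materializing a boolean or branching on
// the z flag with an equal/not_equal jump.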
1639
1640
1641void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001642 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001643 Register result = ToRegister(instr->result());
1644 ASSERT(input.is(result));
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001645 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001646 Handle<String> class_name = instr->hydrogen()->class_name();
1647 NearLabel done;
1648 Label is_true, is_false;
1649
1650 EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);
1651
1652 __ j(not_equal, &is_false);
1653
1654 __ bind(&is_true);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001655 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001656 __ jmp(&done);
1657
1658 __ bind(&is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001659 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001660 __ bind(&done);
1661}
1662
1663
1664void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001665 Register input = ToRegister(instr->InputAt(0));
1666 Register temp = ToRegister(instr->TempAt(0));
1667 Register temp2 = ToRegister(instr->TempAt(1));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001668 if (input.is(temp)) {
1669 // Swap.
1670 Register swapper = temp;
1671 temp = temp2;
1672 temp2 = swapper;
1673 }
1674 Handle<String> class_name = instr->hydrogen()->class_name();
1675
1676 int true_block = chunk_->LookupDestination(instr->true_block_id());
1677 int false_block = chunk_->LookupDestination(instr->false_block_id());
1678
1679 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1680 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1681
1682 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1683
1684 EmitBranch(true_block, false_block, equal);
1685}
1686
1687
1688void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001689 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001690 int true_block = instr->true_block_id();
1691 int false_block = instr->false_block_id();
1692
1693 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1694 EmitBranch(true_block, false_block, equal);
1695}
1696
1697
1698void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001699 // Object and function are in fixed registers defined by the stub.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001700 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001701 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1702
1703 NearLabel true_value, done;
1704 __ test(eax, Operand(eax));
1705 __ j(zero, &true_value);
1706 __ mov(ToRegister(instr->result()), Factory::false_value());
1707 __ jmp(&done);
1708 __ bind(&true_value);
1709 __ mov(ToRegister(instr->result()), Factory::true_value());
1710 __ bind(&done);
1711}
1712
1713
1714void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1715 int true_block = chunk_->LookupDestination(instr->true_block_id());
1716 int false_block = chunk_->LookupDestination(instr->false_block_id());
1717
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001718 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001719 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1720 __ test(eax, Operand(eax));
1721 EmitBranch(true_block, false_block, zero);
1722}
1723
1724
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001725void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1726 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1727 public:
1728 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1729 LInstanceOfKnownGlobal* instr)
1730 : LDeferredCode(codegen), instr_(instr) { }
1731 virtual void Generate() {
1732 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1733 }
1734
1735 Label* map_check() { return &map_check_; }
1736
1737 private:
1738 LInstanceOfKnownGlobal* instr_;
1739 Label map_check_;
1740 };
1741
1742 DeferredInstanceOfKnownGlobal* deferred;
1743 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1744
1745 Label done, false_result;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001746 Register object = ToRegister(instr->InputAt(0));
1747 Register temp = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001748
1749 // A Smi is not an instance of anything.
1750 __ test(object, Immediate(kSmiTagMask));
1751 __ j(zero, &false_result, not_taken);
1752
1753 // This is the inlined call site instanceof cache. The two occurrences of the
1754 // hole value will be patched to the last map/result pair generated by the
1755 // instanceof stub.
1756 NearLabel cache_miss;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001757 Register map = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001758 __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
1759 __ bind(deferred->map_check()); // Label for calculating code patching.
1760 __ cmp(map, Factory::the_hole_value()); // Patched to cached map.
1761 __ j(not_equal, &cache_miss, not_taken);
1762 __ mov(eax, Factory::the_hole_value()); // Patched to either true or false.
1763 __ jmp(&done);
1764
1765 // The inlined call site cache did not match. Check null and string before
1766 // calling the deferred code.
1767 __ bind(&cache_miss);
1768 // Null is not an instance of anything.
1769 __ cmp(object, Factory::null_value());
1770 __ j(equal, &false_result);
1771
1772 // String values are not instances of anything.
1773 Condition is_string = masm_->IsObjectStringType(object, temp, temp);
1774 __ j(is_string, &false_result);
1775
1776 // Go to the deferred code.
1777 __ jmp(deferred->entry());
1778
1779 __ bind(&false_result);
1780 __ mov(ToRegister(instr->result()), Factory::false_value());
1781
1782 // At this point the result holds either the true or the false object. The
1783 // deferred code also produces a true or false object.
1784 __ bind(deferred->exit());
1785 __ bind(&done);
1786}
1787
1788
1789void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1790 Label* map_check) {
1791 __ PushSafepointRegisters();
1792
1793 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1794 flags = static_cast<InstanceofStub::Flags>(
1795 flags | InstanceofStub::kArgsInRegisters);
1796 flags = static_cast<InstanceofStub::Flags>(
1797 flags | InstanceofStub::kCallSiteInlineCheck);
1798 flags = static_cast<InstanceofStub::Flags>(
1799 flags | InstanceofStub::kReturnTrueFalseObject);
1800 InstanceofStub stub(flags);
1801
1802 // Get the temp register reserved by the instruction. This needs to be edi
1803 // because its slot in the pushed safepoint register area is used to
1804 // communicate the offset to the location of the map check.
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001805 Register temp = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001806 ASSERT(temp.is(edi));
1807 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1808 static const int kAdditionalDelta = 13;
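  // kAdditionalDelta is the number of bytes emitted between binding
  // before_push_delta and the end of the stub call below; the ASSERT_EQ after
  // the call verifies it. The resulting delta tells the stub how far back the
  // map check is, so it can patch the inlined map/result cache.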
1809 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1810 Label before_push_delta;
1811 __ bind(&before_push_delta);
1812 __ mov(temp, Immediate(delta));
1813 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
1814 __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
1815 ASSERT_EQ(kAdditionalDelta,
1816 masm_->SizeOfCodeGeneratedSince(&before_push_delta));
1817 RecordSafepointWithRegisters(
1818 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1819 // Put the result value into the eax slot and restore all registers.
1820 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax);
1821
1822 __ PopSafepointRegisters();
1823}
1824
1825
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001826static Condition ComputeCompareCondition(Token::Value op) {
1827 switch (op) {
1828 case Token::EQ_STRICT:
1829 case Token::EQ:
1830 return equal;
1831 case Token::LT:
1832 return less;
1833 case Token::GT:
1834 return greater;
1835 case Token::LTE:
1836 return less_equal;
1837 case Token::GTE:
1838 return greater_equal;
1839 default:
1840 UNREACHABLE();
1841 return no_condition;
1842 }
1843}
1844
1845
1846void LCodeGen::DoCmpT(LCmpT* instr) {
1847 Token::Value op = instr->op();
1848
1849 Handle<Code> ic = CompareIC::GetUninitialized(op);
1850 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1851
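  // The compare stub expects the condition and the input operands reversed for
  // GT and LTE (see DoCmpTAndBranch below).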
1852 Condition condition = ComputeCompareCondition(op);
1853 if (op == Token::GT || op == Token::LTE) {
1854 condition = ReverseCondition(condition);
1855 }
1856 NearLabel true_value, done;
1857 __ test(eax, Operand(eax));
1858 __ j(condition, &true_value);
1859 __ mov(ToRegister(instr->result()), Factory::false_value());
1860 __ jmp(&done);
1861 __ bind(&true_value);
1862 __ mov(ToRegister(instr->result()), Factory::true_value());
1863 __ bind(&done);
1864}
1865
1866
1867void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1868 Token::Value op = instr->op();
1869 int true_block = chunk_->LookupDestination(instr->true_block_id());
1870 int false_block = chunk_->LookupDestination(instr->false_block_id());
1871
1872 Handle<Code> ic = CompareIC::GetUninitialized(op);
1873 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1874
1875 // The compare stub expects compare condition and the input operands
1876 // reversed for GT and LTE.
1877 Condition condition = ComputeCompareCondition(op);
1878 if (op == Token::GT || op == Token::LTE) {
1879 condition = ReverseCondition(condition);
1880 }
1881 __ test(eax, Operand(eax));
1882 EmitBranch(true_block, false_block, condition);
1883}
1884
1885
1886void LCodeGen::DoReturn(LReturn* instr) {
1887 if (FLAG_trace) {
1888 // Preserve the return value on the stack and rely on the runtime
1889 // call to return the value in the same register.
1890 __ push(eax);
1891 __ CallRuntime(Runtime::kTraceExit, 1);
1892 }
1893 __ mov(esp, ebp);
1894 __ pop(ebp);
1895 __ ret((ParameterCount() + 1) * kPointerSize);
1896}
1897
1898
1899void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
1900 Register result = ToRegister(instr->result());
1901 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
1902 if (instr->hydrogen()->check_hole_value()) {
1903 __ cmp(result, Factory::the_hole_value());
1904 DeoptimizeIf(equal, instr->environment());
1905 }
1906}
1907
1908
1909void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001910 Register value = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001911 __ mov(Operand::Cell(instr->hydrogen()->cell()), value);
1912}
1913
1914
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001915void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
1916 // TODO(antonm): load a context with a separate instruction.
1917 Register result = ToRegister(instr->result());
1918 __ LoadContext(result, instr->context_chain_length());
1919 __ mov(result, ContextOperand(result, instr->slot_index()));
1920}
1921
1922
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001923void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001924 Register object = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001925 Register result = ToRegister(instr->result());
1926 if (instr->hydrogen()->is_in_object()) {
1927 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
1928 } else {
1929 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
1930 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
1931 }
1932}
1933
1934
1935void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
1936 ASSERT(ToRegister(instr->object()).is(eax));
1937 ASSERT(ToRegister(instr->result()).is(eax));
1938
1939 __ mov(ecx, instr->name());
1940 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1941 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1942}
1943
1944
fschneider@chromium.org9e3e0b62011-01-03 10:16:46 +00001945void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
1946 Register function = ToRegister(instr->function());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001947 Register temp = ToRegister(instr->TempAt(0));
fschneider@chromium.org9e3e0b62011-01-03 10:16:46 +00001948 Register result = ToRegister(instr->result());
1949
1950 // Check that the function really is a function.
1951 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
1952 DeoptimizeIf(not_equal, instr->environment());
1953
1954 // Check whether the function has an instance prototype.
1955 NearLabel non_instance;
1956 __ test_b(FieldOperand(result, Map::kBitFieldOffset),
1957 1 << Map::kHasNonInstancePrototype);
1958 __ j(not_zero, &non_instance);
1959
1960 // Get the prototype or initial map from the function.
1961 __ mov(result,
1962 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1963
1964 // Check that the function has a prototype or an initial map.
1965 __ cmp(Operand(result), Immediate(Factory::the_hole_value()));
1966 DeoptimizeIf(equal, instr->environment());
1967
1968 // If the function does not have an initial map, we're done.
1969 NearLabel done;
1970 __ CmpObjectType(result, MAP_TYPE, temp);
1971 __ j(not_equal, &done);
1972
1973 // Get the prototype from the initial map.
1974 __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
1975 __ jmp(&done);
1976
1977 // Non-instance prototype: Fetch prototype from constructor field
1978 // in the function's map.
1979 __ bind(&non_instance);
1980 __ mov(result, FieldOperand(result, Map::kConstructorOffset));
1981
1982 // All done.
1983 __ bind(&done);
1984}
1985
1986
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001987void LCodeGen::DoLoadElements(LLoadElements* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001988 ASSERT(instr->result()->Equals(instr->InputAt(0)));
1989 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001990 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset));
1991 if (FLAG_debug_code) {
1992 NearLabel done;
1993 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
1994 Immediate(Factory::fixed_array_map()));
1995 __ j(equal, &done);
1996 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
1997 Immediate(Factory::fixed_cow_array_map()));
1998 __ Check(equal, "Check for fast elements failed.");
1999 __ bind(&done);
2000 }
2001}
2002
2003
2004void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2005 Register arguments = ToRegister(instr->arguments());
2006 Register length = ToRegister(instr->length());
2007 Operand index = ToOperand(instr->index());
2008 Register result = ToRegister(instr->result());
2009
2010 __ sub(length, index);
2011 DeoptimizeIf(below_equal, instr->environment());
2012
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002013 // There are two words between the frame pointer and the last argument.
2014 // Subtracting the index from the length accounts for one of them; the kPointerSize displacement below adds the other.
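  // The effective address below is arguments + (length - index + 1) * kPointerSize.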
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002015 __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2016}
2017
2018
2019void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2020 Register elements = ToRegister(instr->elements());
2021 Register key = ToRegister(instr->key());
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00002022 Register result = ToRegister(instr->result());
2023 ASSERT(result.is(elements));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002024
2025 // Load the result.
2026 __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
2027
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00002028 // Check for the hole value.
2029 __ cmp(result, Factory::the_hole_value());
2030 DeoptimizeIf(equal, instr->environment());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002031}
2032
2033
2034void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2035 ASSERT(ToRegister(instr->object()).is(edx));
2036 ASSERT(ToRegister(instr->key()).is(eax));
2037
2038 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2039 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2040}
2041
2042
2043void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2044 Register result = ToRegister(instr->result());
2045
2046 // Check for arguments adapter frame.
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002047 NearLabel done, adapted;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002048 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2049 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
2050 __ cmp(Operand(result),
2051 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2052 __ j(equal, &adapted);
2053
2054 // No arguments adaptor frame.
2055 __ mov(result, Operand(ebp));
2056 __ jmp(&done);
2057
2058 // Arguments adaptor frame present.
2059 __ bind(&adapted);
2060 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2061
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002062 // Result is the frame pointer for the frame if not adapted and for the real
2063 // frame below the adaptor frame if adapted.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002064 __ bind(&done);
2065}
2066
2067
2068void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002069 Operand elem = ToOperand(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002070 Register result = ToRegister(instr->result());
2071
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002072 NearLabel done;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002073
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002074 // If there is no arguments adaptor frame, the number of arguments is fixed.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002075 __ cmp(ebp, elem);
2076 __ mov(result, Immediate(scope()->num_parameters()));
2077 __ j(equal, &done);
2078
2079 // Arguments adaptor frame present. Get argument length from there.
2080 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2081 __ mov(result, Operand(result,
2082 ArgumentsAdaptorFrameConstants::kLengthOffset));
2083 __ SmiUntag(result);
2084
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002085 // Argument length is in result register.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002086 __ bind(&done);
2087}
2088
2089
2090void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2091 Register receiver = ToRegister(instr->receiver());
2092 ASSERT(ToRegister(instr->function()).is(edi));
2093 ASSERT(ToRegister(instr->result()).is(eax));
2094
2095 // If the receiver is null or undefined, we have to pass the
2096 // global object as a receiver.
2097 NearLabel global_receiver, receiver_ok;
2098 __ cmp(receiver, Factory::null_value());
2099 __ j(equal, &global_receiver);
2100 __ cmp(receiver, Factory::undefined_value());
2101 __ j(not_equal, &receiver_ok);
2102 __ bind(&global_receiver);
2103 __ mov(receiver, GlobalObjectOperand());
2104 __ bind(&receiver_ok);
2105
2106 Register length = ToRegister(instr->length());
2107 Register elements = ToRegister(instr->elements());
2108
2109 Label invoke;
2110
2111 // Copy the arguments to this function possibly from the
2112 // adaptor frame below it.
2113 const uint32_t kArgumentsLimit = 1 * KB;
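  // Deoptimize if there are more than kArgumentsLimit arguments; the loop
  // below pushes each argument onto the stack individually.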
2114 __ cmp(length, kArgumentsLimit);
2115 DeoptimizeIf(above, instr->environment());
2116
2117 __ push(receiver);
2118 __ mov(receiver, length);
2119
2120 // Loop through the arguments pushing them onto the execution
2121 // stack.
2122 Label loop;
2123 // length is a small non-negative integer, due to the test above.
2124 __ test(length, Operand(length));
2125 __ j(zero, &invoke);
2126 __ bind(&loop);
2127 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2128 __ dec(length);
2129 __ j(not_zero, &loop);
2130
2131 // Invoke the function.
2132 __ bind(&invoke);
2133 ASSERT(receiver.is(eax));
2134 v8::internal::ParameterCount actual(eax);
2135 SafepointGenerator safepoint_generator(this,
2136 instr->pointer_map(),
2137 Safepoint::kNoDeoptimizationIndex);
2138 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator);
2139}
2140
2141
2142void LCodeGen::DoPushArgument(LPushArgument* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002143 LOperand* argument = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002144 if (argument->IsConstantOperand()) {
2145 __ push(ToImmediate(argument));
2146 } else {
2147 __ push(ToOperand(argument));
2148 }
2149}
2150
2151
2152void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2153 Register result = ToRegister(instr->result());
2154 __ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2155}
2156
2157
2158void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2159 Register result = ToRegister(instr->result());
2160 __ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2161 __ mov(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset));
2162}
2163
2164
2165void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2166 int arity,
2167 LInstruction* instr) {
2168 // Change context if needed.
2169 bool change_context =
2170 (graph()->info()->closure()->context() != function->context()) ||
2171 scope()->contains_with() ||
2172 (scope()->num_heap_slots() > 0);
2173 if (change_context) {
2174 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2175 }
2176
2177 // Set eax to the argument count if adaptation is not needed. Assumes that eax
2178 // is available to write to at this point.
2179 if (!function->NeedsArgumentsAdaption()) {
2180 __ mov(eax, arity);
2181 }
2182
2183 LPointerMap* pointers = instr->pointer_map();
2184 RecordPosition(pointers->position());
2185
2186 // Invoke function.
2187 if (*function == *graph()->info()->closure()) {
2188 __ CallSelf();
2189 } else {
2190 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2191 }
2192
2193 // Set up deoptimization.
2194 RegisterLazyDeoptimization(instr);
2195
2196 // Restore context.
2197 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2198}
2199
2200
2201void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2202 ASSERT(ToRegister(instr->result()).is(eax));
2203 __ mov(edi, instr->function());
2204 CallKnownFunction(instr->function(), instr->arity(), instr);
2205}
2206
2207
2208void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002209 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002210 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2211 Factory::heap_number_map());
2212 DeoptimizeIf(not_equal, instr->environment());
2213
2214 Label done;
2215 Register tmp = input_reg.is(eax) ? ecx : eax;
2216 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2217
2218 // Preserve the value of all registers.
2219 __ PushSafepointRegisters();
2220
2221 Label negative;
2222 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002223 // Check the sign of the argument. If the argument is positive, just
2224 // return it. We do not need to patch the stack since |input| and
2225 // |result| are the same register and |input| will be restored
2226 // unchanged by popping safepoint registers.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002227 __ test(tmp, Immediate(HeapNumber::kSignMask));
2228 __ j(not_zero, &negative);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002229 __ jmp(&done);
2230
2231 __ bind(&negative);
2232
2233 Label allocated, slow;
2234 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2235 __ jmp(&allocated);
2236
2237 // Slow case: Call the runtime system to do the number allocation.
2238 __ bind(&slow);
2239
2240 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2241 RecordSafepointWithRegisters(
2242 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2243 // Set the pointer to the new heap number in tmp.
2244 if (!tmp.is(eax)) __ mov(tmp, eax);
2245
2246 // Restore input_reg after call to runtime.
2247 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize));
2248
2249 __ bind(&allocated);
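  // Copy the input heap number into the new one with the sign bit cleared,
  // producing the absolute value.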
2250 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2251 __ and_(tmp2, ~HeapNumber::kSignMask);
2252 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2253 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2254 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002255 __ mov(Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize), tmp);
2256
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002257 __ bind(&done);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002258 __ PopSafepointRegisters();
2259}
2260
2261
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002262void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2263 Register input_reg = ToRegister(instr->InputAt(0));
2264 __ test(input_reg, Operand(input_reg));
2265 Label is_positive;
2266 __ j(not_sign, &is_positive);
2267 __ neg(input_reg);
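  // If the input was kMinInt, neg leaves the value unchanged and still
  // negative; the test below then triggers a deoptimization because the
  // absolute value is not representable as an int32.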
2268 __ test(input_reg, Operand(input_reg));
2269 DeoptimizeIf(negative, instr->environment());
2270 __ bind(&is_positive);
2271}
2272
2273
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002274void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2275 // Class for deferred case.
2276 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2277 public:
2278 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2279 LUnaryMathOperation* instr)
2280 : LDeferredCode(codegen), instr_(instr) { }
2281 virtual void Generate() {
2282 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2283 }
2284 private:
2285 LUnaryMathOperation* instr_;
2286 };
2287
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002288 ASSERT(instr->InputAt(0)->Equals(instr->result()));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002289 Representation r = instr->hydrogen()->value()->representation();
2290
2291 if (r.IsDouble()) {
2292 XMMRegister scratch = xmm0;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002293 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002294 __ pxor(scratch, scratch);
2295 __ subsd(scratch, input_reg);
2296 __ pand(input_reg, scratch);
2297 } else if (r.IsInteger32()) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002298 EmitIntegerMathAbs(instr);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002299 } else { // Tagged case.
2300 DeferredMathAbsTaggedHeapNumber* deferred =
2301 new DeferredMathAbsTaggedHeapNumber(this, instr);
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002302 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002303 // Smi check.
2304 __ test(input_reg, Immediate(kSmiTagMask));
2305 __ j(not_zero, deferred->entry());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002306 EmitIntegerMathAbs(instr);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002307 __ bind(deferred->exit());
2308 }
2309}
2310
2311
2312void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2313 XMMRegister xmm_scratch = xmm0;
2314 Register output_reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002315 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002316 __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2317 __ ucomisd(input_reg, xmm_scratch);
2318
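  // ucomisd cannot distinguish -0.0 from +0.0, so when we must preserve -0 we
  // also deoptimize when the input compares equal to zero.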
2319 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2320 DeoptimizeIf(below_equal, instr->environment());
2321 } else {
2322 DeoptimizeIf(below, instr->environment());
2323 }
2324
2325 // Use truncating instruction (OK because input is positive).
2326 __ cvttsd2si(output_reg, Operand(input_reg));
2327
2328 // Overflow is signalled with minint.
2329 __ cmp(output_reg, 0x80000000u);
2330 DeoptimizeIf(equal, instr->environment());
2331}
2332
2333
2334void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2335 XMMRegister xmm_scratch = xmm0;
2336 Register output_reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002337 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002338
2339 // xmm_scratch = 0.5
2340 ExternalReference one_half = ExternalReference::address_of_one_half();
2341 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2342
2343 // input = input + 0.5
2344 __ addsd(input_reg, xmm_scratch);
2345
2346 // We need to return -0 for the input range [-0.5, 0), otherwise
2347 // compute Math.floor(value + 0.5).
2348 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2349 __ ucomisd(input_reg, xmm_scratch);
2350 DeoptimizeIf(below_equal, instr->environment());
2351 } else {
2352 // If we don't need to bail out on -0, we only bail out on
2353 // negative inputs.
2354 __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2355 __ ucomisd(input_reg, xmm_scratch);
2356 DeoptimizeIf(below, instr->environment());
2357 }
2358
2359 // Compute Math.floor(value + 0.5).
2360 // Use truncating instruction (OK because input is positive).
2361 __ cvttsd2si(output_reg, Operand(input_reg));
2362
2363 // Overflow is signalled with minint.
2364 __ cmp(output_reg, 0x80000000u);
2365 DeoptimizeIf(equal, instr->environment());
2366}
2367
2368
2369void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002370 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002371 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2372 __ sqrtsd(input_reg, input_reg);
2373}
2374
2375
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002376void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2377 XMMRegister xmm_scratch = xmm0;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002378 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002379 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2380 ExternalReference negative_infinity =
2381 ExternalReference::address_of_negative_infinity();
2382 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity));
2383 __ ucomisd(xmm_scratch, input_reg);
2384 DeoptimizeIf(equal, instr->environment());
2385 __ sqrtsd(input_reg, input_reg);
2386}
2387
2388
2389void LCodeGen::DoPower(LPower* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002390 LOperand* left = instr->InputAt(0);
2391 LOperand* right = instr->InputAt(1);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002392 DoubleRegister result_reg = ToDoubleRegister(instr->result());
2393 Representation exponent_type = instr->hydrogen()->right()->representation();
2394 if (exponent_type.IsDouble()) {
2395 // It is safe to use ebx directly since the instruction is marked
2396 // as a call.
2397 __ PrepareCallCFunction(4, ebx);
2398 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2399 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
2400 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2401 } else if (exponent_type.IsInteger32()) {
2402 // It is safe to use ebx directly since the instruction is marked
2403 // as a call.
2404 ASSERT(!ToRegister(right).is(ebx));
2405 __ PrepareCallCFunction(4, ebx);
2406 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2407 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
2408 __ CallCFunction(ExternalReference::power_double_int_function(), 4);
2409 } else {
2410 ASSERT(exponent_type.IsTagged());
2411 CpuFeatures::Scope scope(SSE2);
2412 Register right_reg = ToRegister(right);
2413
2414 Label non_smi, call;
2415 __ test(right_reg, Immediate(kSmiTagMask));
2416 __ j(not_zero, &non_smi);
2417 __ SmiUntag(right_reg);
2418 __ cvtsi2sd(result_reg, Operand(right_reg));
2419 __ jmp(&call);
2420
2421 __ bind(&non_smi);
2422 // It is safe to use ebx directly since the instruction is marked
2423 // as a call.
2424 ASSERT(!right_reg.is(ebx));
2425 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx);
2426 DeoptimizeIf(not_equal, instr->environment());
2427 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
2428
2429 __ bind(&call);
2430 __ PrepareCallCFunction(4, ebx);
2431 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2432 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
2433 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2434 }
2435
2436 // Return value is in st(0) on ia32.
2437 // Store it into the (fixed) result register.
2438 __ sub(Operand(esp), Immediate(kDoubleSize));
2439 __ fstp_d(Operand(esp, 0));
2440 __ movdbl(result_reg, Operand(esp, 0));
2441 __ add(Operand(esp), Immediate(kDoubleSize));
2442}
2443
2444
2445void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2446 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
whesse@chromium.org023421e2010-12-21 12:19:12 +00002447 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2448 TranscendentalCacheStub::UNTAGGED);
2449 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2450}
2451
2452
2453void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2454 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2455 TranscendentalCacheStub stub(TranscendentalCache::COS,
2456 TranscendentalCacheStub::UNTAGGED);
2457 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2458}
2459
2460
2461void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2462 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2463 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2464 TranscendentalCacheStub::UNTAGGED);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002465 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2466}
2467
2468
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002469void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2470 switch (instr->op()) {
2471 case kMathAbs:
2472 DoMathAbs(instr);
2473 break;
2474 case kMathFloor:
2475 DoMathFloor(instr);
2476 break;
2477 case kMathRound:
2478 DoMathRound(instr);
2479 break;
2480 case kMathSqrt:
2481 DoMathSqrt(instr);
2482 break;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002483 case kMathPowHalf:
2484 DoMathPowHalf(instr);
2485 break;
whesse@chromium.org023421e2010-12-21 12:19:12 +00002486 case kMathCos:
2487 DoMathCos(instr);
2488 break;
2489 case kMathSin:
2490 DoMathSin(instr);
2491 break;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002492 case kMathLog:
2493 DoMathLog(instr);
2494 break;
2495
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002496 default:
2497 UNREACHABLE();
2498 }
2499}
2500
2501
2502void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2503 ASSERT(ToRegister(instr->result()).is(eax));
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002504 ASSERT(ToRegister(instr->InputAt(0)).is(ecx));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002505
2506 int arity = instr->arity();
2507 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2508 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2509 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2510}
2511
2512
2513void LCodeGen::DoCallNamed(LCallNamed* instr) {
2514 ASSERT(ToRegister(instr->result()).is(eax));
2515
2516 int arity = instr->arity();
2517 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2518 __ mov(ecx, instr->name());
2519 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2520 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2521}
2522
2523
2524void LCodeGen::DoCallFunction(LCallFunction* instr) {
2525 ASSERT(ToRegister(instr->result()).is(eax));
2526
2527 int arity = instr->arity();
2528 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2529 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2530 __ Drop(1);
2531 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2532}
2533
2534
2535void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2536 ASSERT(ToRegister(instr->result()).is(eax));
2537
2538 int arity = instr->arity();
2539 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2540 __ mov(ecx, instr->name());
2541 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2542 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2543}
2544
2545
2546void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2547 ASSERT(ToRegister(instr->result()).is(eax));
2548 __ mov(edi, instr->target());
2549 CallKnownFunction(instr->target(), instr->arity(), instr);
2550}
2551
2552
2553void LCodeGen::DoCallNew(LCallNew* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002554 ASSERT(ToRegister(instr->InputAt(0)).is(edi));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002555 ASSERT(ToRegister(instr->result()).is(eax));
2556
2557 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2558 __ Set(eax, Immediate(instr->arity()));
2559 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2560}
2561
2562
2563void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2564 CallRuntime(instr->function(), instr->arity(), instr);
2565}
2566
2567
2568void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2569 Register object = ToRegister(instr->object());
2570 Register value = ToRegister(instr->value());
2571 int offset = instr->offset();
2572
2573 if (!instr->transition().is_null()) {
2574 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
2575 }
2576
2577 // Do the store.
2578 if (instr->is_in_object()) {
2579 __ mov(FieldOperand(object, offset), value);
2580 if (instr->needs_write_barrier()) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002581 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002582 // Update the write barrier for the object for in-object properties.
2583 __ RecordWrite(object, offset, value, temp);
2584 }
2585 } else {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002586 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002587 __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
2588 __ mov(FieldOperand(temp, offset), value);
2589 if (instr->needs_write_barrier()) {
2590 // Update the write barrier for the properties array.
2591 // object is used as a scratch register.
2592 __ RecordWrite(temp, offset, value, object);
2593 }
2594 }
2595}
2596
2597
2598void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2599 ASSERT(ToRegister(instr->object()).is(edx));
2600 ASSERT(ToRegister(instr->value()).is(eax));
2601
2602 __ mov(ecx, instr->name());
2603 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
2604 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2605}
2606
2607
2608void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2609 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2610 DeoptimizeIf(above_equal, instr->environment());
2611}
2612
2613
2614void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
2615 Register value = ToRegister(instr->value());
2616 Register elements = ToRegister(instr->object());
2617 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
2618
2619 // Do the store.
2620 if (instr->key()->IsConstantOperand()) {
2621 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
2622 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
2623 int offset =
2624 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
2625 __ mov(FieldOperand(elements, offset), value);
2626 } else {
2627 __ mov(FieldOperand(elements, key, times_4, FixedArray::kHeaderSize),
2628 value);
2629 }
2630
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002631 if (instr->hydrogen()->NeedsWriteBarrier()) {
2632 // Compute address of modified element and store it into key register.
2633 __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
2634 __ RecordWrite(elements, key, value);
2635 }
2636}
2637
2638
2639void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2640 ASSERT(ToRegister(instr->object()).is(edx));
2641 ASSERT(ToRegister(instr->key()).is(ecx));
2642 ASSERT(ToRegister(instr->value()).is(eax));
2643
2644 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
2645 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2646}
2647
2648
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002649void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2650 class DeferredStringCharCodeAt: public LDeferredCode {
2651 public:
2652 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2653 : LDeferredCode(codegen), instr_(instr) { }
2654 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2655 private:
2656 LStringCharCodeAt* instr_;
2657 };
2658
2659 Register string = ToRegister(instr->string());
2660 Register index = no_reg;
2661 int const_index = -1;
2662 if (instr->index()->IsConstantOperand()) {
2663 const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2664 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2665 if (!Smi::IsValid(const_index)) {
2666 // Guaranteed to be out of bounds because of the assert above.
2667 // So the bounds check that must dominate this instruction must
2668 // have deoptimized already.
2669 if (FLAG_debug_code) {
2670 __ Abort("StringCharCodeAt: out of bounds index.");
2671 }
2672 // No code needs to be generated.
2673 return;
2674 }
2675 } else {
2676 index = ToRegister(instr->index());
2677 }
2678 Register result = ToRegister(instr->result());
2679
2680 DeferredStringCharCodeAt* deferred =
2681 new DeferredStringCharCodeAt(this, instr);
2682
2683 NearLabel flat_string, ascii_string, done;
2684
2685 // Fetch the instance type of the receiver into result register.
2686 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2687 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2688
2689 // We need special handling for non-flat strings.
2690 STATIC_ASSERT(kSeqStringTag == 0);
2691 __ test(result, Immediate(kStringRepresentationMask));
2692 __ j(zero, &flat_string);
2693
2694 // Handle non-flat strings.
2695 __ test(result, Immediate(kIsConsStringMask));
2696 __ j(zero, deferred->entry());
2697
2698 // ConsString.
2699 // Check whether the right hand side is the empty string (i.e. if
2700 // this is really a flat string in a cons string). If that is not
2701 // the case we would rather go to the runtime system now to flatten
2702 // the string.
2703 __ cmp(FieldOperand(string, ConsString::kSecondOffset),
2704 Immediate(Factory::empty_string()));
2705 __ j(not_equal, deferred->entry());
2706 // Get the first of the two strings and load its instance type.
2707 __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
2708 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2709 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2710 // If the first cons component is also non-flat, then go to runtime.
2711 STATIC_ASSERT(kSeqStringTag == 0);
2712 __ test(result, Immediate(kStringRepresentationMask));
2713 __ j(not_zero, deferred->entry());
2714
2715 // Check for 1-byte or 2-byte string.
2716 __ bind(&flat_string);
2717 STATIC_ASSERT(kAsciiStringTag != 0);
2718 __ test(result, Immediate(kStringEncodingMask));
2719 __ j(not_zero, &ascii_string);
2720
2721 // 2-byte string.
2722 // Load the 2-byte character code into the result register.
2723 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2724 if (instr->index()->IsConstantOperand()) {
2725 __ movzx_w(result,
2726 FieldOperand(string,
2727 SeqTwoByteString::kHeaderSize + 2 * const_index));
2728 } else {
2729 __ movzx_w(result, FieldOperand(string,
2730 index,
2731 times_2,
2732 SeqTwoByteString::kHeaderSize));
2733 }
2734 __ jmp(&done);
2735
2736 // ASCII string.
2737 // Load the byte into the result register.
2738 __ bind(&ascii_string);
2739 if (instr->index()->IsConstantOperand()) {
2740 __ movzx_b(result, FieldOperand(string,
2741 SeqAsciiString::kHeaderSize + const_index));
2742 } else {
2743 __ movzx_b(result, FieldOperand(string,
2744 index,
2745 times_1,
2746 SeqAsciiString::kHeaderSize));
2747 }
2748 __ bind(&done);
2749 __ bind(deferred->exit());
2750}
2751
2752
2753void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2754 Register string = ToRegister(instr->string());
2755 Register result = ToRegister(instr->result());
2756
2757 // TODO(3095996): Get rid of this. For now, we need to make the
2758 // result register contain a valid pointer because it is already
2759 // contained in the register pointer map.
2760 __ Set(result, Immediate(0));
2761
2762 __ PushSafepointRegisters();
2763 __ push(string);
2764 // Push the index as a smi. This is safe because of the checks in
2765 // DoStringCharCodeAt above.
2766 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2767 if (instr->index()->IsConstantOperand()) {
2768 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2769 __ push(Immediate(Smi::FromInt(const_index)));
2770 } else {
2771 Register index = ToRegister(instr->index());
2772 __ SmiTag(index);
2773 __ push(index);
2774 }
2775 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2776 RecordSafepointWithRegisters(
2777 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2778 if (FLAG_debug_code) {
2779 __ AbortIfNotSmi(eax);
2780 }
2781 __ SmiUntag(eax);
2782 __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax);
2783 __ PopSafepointRegisters();
2784}
2785
2786
2787void LCodeGen::DoStringLength(LStringLength* instr) {
2788 Register string = ToRegister(instr->string());
2789 Register result = ToRegister(instr->result());
2790 __ mov(result, FieldOperand(string, String::kLengthOffset));
2791}
2792
2793
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002794void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002795 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002796 ASSERT(input->IsRegister() || input->IsStackSlot());
2797 LOperand* output = instr->result();
2798 ASSERT(output->IsDoubleRegister());
2799 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
2800}
2801
2802
2803void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
2804 class DeferredNumberTagI: public LDeferredCode {
2805 public:
2806 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
2807 : LDeferredCode(codegen), instr_(instr) { }
2808 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
2809 private:
2810 LNumberTagI* instr_;
2811 };
2812
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002813 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002814 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2815 Register reg = ToRegister(input);
2816
2817 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
2818 __ SmiTag(reg);
2819 __ j(overflow, deferred->entry());
2820 __ bind(deferred->exit());
2821}
2822
2823
2824void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
2825 Label slow;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002826 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002827 Register tmp = reg.is(eax) ? ecx : eax;
2828
2829 // Preserve the value of all registers.
2830 __ PushSafepointRegisters();
2831
2832 // There was overflow, so bits 30 and 31 of the original integer
2833 // disagree. Try to allocate a heap number in new space and store
2834 // the value in there. If that fails, call the runtime system.
2835 NearLabel done;
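  // Undo the overflowed SmiTag: the arithmetic shift right restores all but
  // the sign bit, and flipping the sign bit with the xor recovers the original
  // untagged value.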
2836 __ SmiUntag(reg);
2837 __ xor_(reg, 0x80000000);
2838 __ cvtsi2sd(xmm0, Operand(reg));
2839 if (FLAG_inline_new) {
2840 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
2841 __ jmp(&done);
2842 }
2843
2844 // Slow case: Call the runtime system to do the number allocation.
2845 __ bind(&slow);
2846
2847 // TODO(3095996): Put a valid pointer value in the stack slot where the result
2848 // register is stored, as this register is in the pointer map, but contains an
2849 // integer value.
2850 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));
2851
2852 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2853 RecordSafepointWithRegisters(
2854 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2855 if (!reg.is(eax)) __ mov(reg, eax);
2856
2857 // Done. Put the value in xmm0 into the value field of the allocated heap
2858 // number.
2859 __ bind(&done);
2860 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
2861 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg);
2862 __ PopSafepointRegisters();
2863}
2864
2865
2866void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
2867 class DeferredNumberTagD: public LDeferredCode {
2868 public:
2869 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
2870 : LDeferredCode(codegen), instr_(instr) { }
2871 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
2872 private:
2873 LNumberTagD* instr_;
2874 };
2875
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002876 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002877 Register reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002878 Register tmp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002879
2880 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
2881 if (FLAG_inline_new) {
2882 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
2883 } else {
2884 __ jmp(deferred->entry());
2885 }
2886 __ bind(deferred->exit());
2887 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
2888}
2889
2890
2891void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2892 // TODO(3095996): Get rid of this. For now, we need to make the
2893 // result register contain a valid pointer because it is already
2894 // contained in the register pointer map.
2895 Register reg = ToRegister(instr->result());
2896 __ Set(reg, Immediate(0));
2897
2898 __ PushSafepointRegisters();
2899 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2900 RecordSafepointWithRegisters(
2901 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2902 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax);
2903 __ PopSafepointRegisters();
2904}
2905
2906
2907void LCodeGen::DoSmiTag(LSmiTag* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002908 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002909 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2910 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
2911 __ SmiTag(ToRegister(input));
2912}
2913
2914
2915void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002916 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002917 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2918 if (instr->needs_check()) {
2919 __ test(ToRegister(input), Immediate(kSmiTagMask));
2920 DeoptimizeIf(not_zero, instr->environment());
2921 }
2922 __ SmiUntag(ToRegister(input));
2923}
2924
2925
2926void LCodeGen::EmitNumberUntagD(Register input_reg,
2927 XMMRegister result_reg,
2928 LEnvironment* env) {
2929 NearLabel load_smi, heap_number, done;
2930
2931 // Smi check.
2932 __ test(input_reg, Immediate(kSmiTagMask));
2933 __ j(zero, &load_smi, not_taken);
2934
2935 // Heap number map check.
2936 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2937 Factory::heap_number_map());
2938 __ j(equal, &heap_number);
2939
2940 __ cmp(input_reg, Factory::undefined_value());
2941 DeoptimizeIf(not_equal, env);
2942
2943 // Convert undefined to NaN.
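  // input_reg is briefly borrowed to hold the canonical NaN heap number while
  // its double value is loaded, and is then restored from the stack.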
2944 __ push(input_reg);
2945 __ mov(input_reg, Factory::nan_value());
2946 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
2947 __ pop(input_reg);
2948 __ jmp(&done);
2949
2950 // Heap number to XMM conversion.
2951 __ bind(&heap_number);
2952 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
2953 __ jmp(&done);
2954
2955 // Smi to XMM conversion.
2956 __ bind(&load_smi);
2957 __ SmiUntag(input_reg); // Untag smi before converting to float.
2958 __ cvtsi2sd(result_reg, Operand(input_reg));
2959 __ SmiTag(input_reg); // Retag smi.
2960 __ bind(&done);
2961}
2962
2963
2964class DeferredTaggedToI: public LDeferredCode {
2965 public:
2966 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
2967 : LDeferredCode(codegen), instr_(instr) { }
2968 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
2969 private:
2970 LTaggedToI* instr_;
2971};
2972
2973
2974void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
2975 NearLabel done, heap_number;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002976 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002977
2978 // Heap number map check.
2979 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2980 Factory::heap_number_map());
2981
2982 if (instr->truncating()) {
2983 __ j(equal, &heap_number);
2984 // Check for undefined. Undefined is converted to zero for truncating
2985 // conversions.
2986 __ cmp(input_reg, Factory::undefined_value());
2987 DeoptimizeIf(not_equal, instr->environment());
2988 __ mov(input_reg, 0);
2989 __ jmp(&done);
2990
2991 __ bind(&heap_number);
2992 if (CpuFeatures::IsSupported(SSE3)) {
2993 CpuFeatures::Scope scope(SSE3);
2994 NearLabel convert;
2995 // Use more powerful conversion when sse3 is available.
2996 // Load x87 register with heap number.
2997 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
2998 // Get exponent alone and check for too-big exponent.
2999 __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3000 __ and_(input_reg, HeapNumber::kExponentMask);
3001 const uint32_t kTooBigExponent =
3002 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
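      // (kExponentBias + 63) is the biased exponent of 2^63; values with an
      // exponent that big are (conservatively) rejected because they may not
      // fit in a signed 64-bit integer, in which case fisttp_d below would
      // produce an undefined result.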
3003 __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
3004 __ j(less, &convert);
3005 // Pop FPU stack before deoptimizing.
3006 __ ffree(0);
3007 __ fincstp();
3008 DeoptimizeIf(no_condition, instr->environment());
3009
3010      // Reserve space for the 64-bit answer.
3011 __ bind(&convert);
3012 __ sub(Operand(esp), Immediate(kDoubleSize));
3013 // Do conversion, which cannot fail because we checked the exponent.
3014 __ fisttp_d(Operand(esp, 0));
3015 __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3016 __ add(Operand(esp), Immediate(kDoubleSize));
3017 } else {
3018 NearLabel deopt;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003019 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003020 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3021 __ cvttsd2si(input_reg, Operand(xmm0));
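      // cvttsd2si returns 0x80000000 (the x86 "integer indefinite" value)
      // when the operand is NaN or does not fit in a signed 32-bit integer.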
3022 __ cmp(input_reg, 0x80000000u);
3023 __ j(not_equal, &done);
3024      // Check if the input was 0x80000000 (kMinInt).
3025      // If not, the conversion overflowed and we deoptimize.
3026 ExternalReference min_int = ExternalReference::address_of_min_int();
3027 __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
3028 __ ucomisd(xmm_temp, xmm0);
3029 DeoptimizeIf(not_equal, instr->environment());
3030 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3031 }
3032 } else {
3033 // Deoptimize if we don't have a heap number.
3034 DeoptimizeIf(not_equal, instr->environment());
3035
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003036 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003037 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3038 __ cvttsd2si(input_reg, Operand(xmm0));
3039 __ cvtsi2sd(xmm_temp, Operand(input_reg));
3040 __ ucomisd(xmm0, xmm_temp);
3041 DeoptimizeIf(not_equal, instr->environment());
3042 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3043 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3044 __ test(input_reg, Operand(input_reg));
3045 __ j(not_zero, &done);
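      // The truncated result is zero: distinguish +0 from -0 by copying the
      // sign bit of the input into bit 0 with movmskpd.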
3046 __ movmskpd(input_reg, xmm0);
3047 __ and_(input_reg, 1);
3048 DeoptimizeIf(not_zero, instr->environment());
3049 }
3050 }
3051 __ bind(&done);
3052}
3053
3054
3055void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003056 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003057 ASSERT(input->IsRegister());
3058 ASSERT(input->Equals(instr->result()));
3059
3060 Register input_reg = ToRegister(input);
3061
3062 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3063
3064 // Smi check.
3065 __ test(input_reg, Immediate(kSmiTagMask));
3066 __ j(not_zero, deferred->entry());
3067
3068  // Smi to int32 conversion.
3069 __ SmiUntag(input_reg); // Untag smi.
3070
3071 __ bind(deferred->exit());
3072}
3073
3074
3075void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003076 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003077 ASSERT(input->IsRegister());
3078 LOperand* result = instr->result();
3079 ASSERT(result->IsDoubleRegister());
3080
3081 Register input_reg = ToRegister(input);
3082 XMMRegister result_reg = ToDoubleRegister(result);
3083
3084 EmitNumberUntagD(input_reg, result_reg, instr->environment());
3085}
3086
3087
3088void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003089 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003090 ASSERT(input->IsDoubleRegister());
3091 LOperand* result = instr->result();
3092 ASSERT(result->IsRegister());
3093
3094 XMMRegister input_reg = ToDoubleRegister(input);
3095 Register result_reg = ToRegister(result);
3096
3097 if (instr->truncating()) {
3098 // Performs a truncating conversion of a floating point number as used by
3099 // the JS bitwise operations.
3100 __ cvttsd2si(result_reg, Operand(input_reg));
3101 __ cmp(result_reg, 0x80000000u);
3102 if (CpuFeatures::IsSupported(SSE3)) {
3103      // This will deoptimize if the exponent of the input is out of range.
3104 CpuFeatures::Scope scope(SSE3);
3105 NearLabel convert, done;
3106 __ j(not_equal, &done);
3107 __ sub(Operand(esp), Immediate(kDoubleSize));
3108 __ movdbl(Operand(esp, 0), input_reg);
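      // The high word of the spilled double (at esp + 4) holds the sign bit,
      // the 11 exponent bits and the top 20 mantissa bits.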
3109 // Get exponent alone and check for too-big exponent.
3110 __ mov(result_reg, Operand(esp, sizeof(int32_t)));
3111 __ and_(result_reg, HeapNumber::kExponentMask);
3112 const uint32_t kTooBigExponent =
3113 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3114 __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
3115 __ j(less, &convert);
3116 __ add(Operand(esp), Immediate(kDoubleSize));
3117 DeoptimizeIf(no_condition, instr->environment());
3118 __ bind(&convert);
3119 // Do conversion, which cannot fail because we checked the exponent.
3120 __ fld_d(Operand(esp, 0));
3121 __ fisttp_d(Operand(esp, 0));
3122 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
3123 __ add(Operand(esp), Immediate(kDoubleSize));
3124 __ bind(&done);
3125 } else {
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003126 NearLabel done;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003127 Register temp_reg = ToRegister(instr->TempAt(0));
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003128 XMMRegister xmm_scratch = xmm0;
3129
3130 // If cvttsd2si succeeded, we're done. Otherwise, we attempt
3131 // manual conversion.
3132 __ j(not_equal, &done);
3133
3134 // Get high 32 bits of the input in result_reg and temp_reg.
3135 __ pshufd(xmm_scratch, input_reg, 1);
3136 __ movd(Operand(temp_reg), xmm_scratch);
3137 __ mov(result_reg, temp_reg);
3138
3139 // Prepare negation mask in temp_reg.
3140 __ sar(temp_reg, kBitsPerInt - 1);
3141
3142 // Extract the exponent from result_reg and subtract adjusted
3143 // bias from it. The adjustment is selected in a way such that
3144 // when the difference is zero, the answer is in the low 32 bits
3145 // of the input, otherwise a shift has to be performed.
3146 __ shr(result_reg, HeapNumber::kExponentShift);
3147 __ and_(result_reg,
3148 HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
3149 __ sub(Operand(result_reg),
3150 Immediate(HeapNumber::kExponentBias +
3151 HeapNumber::kExponentBits +
3152 HeapNumber::kMantissaBits));
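      // result_reg now holds (unbiased exponent - 63); its negation is how
      // far the 64-bit significand prepared below has to be shifted right
      // for the integer answer to end up in the low 32 bits.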
3153      // Don't handle big exponents (> kMantissaBits + kExponentBits == 63)
3154      // or special exponents.
3155 DeoptimizeIf(greater, instr->environment());
3156
3157 // Zero out the sign and the exponent in the input (by shifting
3158 // it to the left) and restore the implicit mantissa bit,
3159 // i.e. convert the input to unsigned int64 shifted left by
3160 // kExponentBits.
3161 ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
3162 // Minus zero has the most significant bit set and the other
3163 // bits cleared.
3164 __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
3165 __ psllq(input_reg, HeapNumber::kExponentBits);
3166 __ por(input_reg, xmm_scratch);
3167
3168 // Get the amount to shift the input right in xmm_scratch.
3169 __ neg(result_reg);
3170 __ movd(xmm_scratch, Operand(result_reg));
3171
3172 // Shift the input right and extract low 32 bits.
3173 __ psrlq(input_reg, xmm_scratch);
3174 __ movd(Operand(result_reg), input_reg);
3175
3176 // Use the prepared mask in temp_reg to negate the result if necessary.
3177 __ xor_(result_reg, Operand(temp_reg));
3178 __ sub(result_reg, Operand(temp_reg));
3179 __ bind(&done);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003180 }
3181 } else {
3182 NearLabel done;
3183 __ cvttsd2si(result_reg, Operand(input_reg));
3184 __ cvtsi2sd(xmm0, Operand(result_reg));
3185 __ ucomisd(xmm0, input_reg);
3186 DeoptimizeIf(not_equal, instr->environment());
3187 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3188 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3189 // The integer converted back is equal to the original. We
3190 // only have to test if we got -0 as an input.
3191 __ test(result_reg, Operand(result_reg));
3192 __ j(not_zero, &done);
3193 __ movmskpd(result_reg, input_reg);
3194 // Bit 0 contains the sign of the double in input_reg.
3195 // If input was positive, we are ok and return 0, otherwise
3196 // deoptimize.
3197 __ and_(result_reg, 1);
3198 DeoptimizeIf(not_zero, instr->environment());
3199 }
3200 __ bind(&done);
3201 }
3202}
3203
3204
3205void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003206 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003207 ASSERT(input->IsRegister());
3208 __ test(ToRegister(input), Immediate(kSmiTagMask));
3209 DeoptimizeIf(instr->condition(), instr->environment());
3210}
3211
3212
3213void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003214 Register input = ToRegister(instr->InputAt(0));
3215 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003216 InstanceType first = instr->hydrogen()->first();
3217 InstanceType last = instr->hydrogen()->last();
3218
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003219 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003220
3221  // If there is only one type in the interval, check for equality.
3222 if (first == last) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003223 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3224 static_cast<int8_t>(first));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003225 DeoptimizeIf(not_equal, instr->environment());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003226 } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
3227 // String has a dedicated bit in instance type.
3228 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask);
3229 DeoptimizeIf(not_zero, instr->environment());
3230 } else {
3231 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3232 static_cast<int8_t>(first));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003233 DeoptimizeIf(below, instr->environment());
3234 // Omit check for the last type.
3235 if (last != LAST_TYPE) {
3236 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3237 static_cast<int8_t>(last));
3238 DeoptimizeIf(above, instr->environment());
3239 }
3240 }
3241}
3242
3243
3244void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003245 ASSERT(instr->InputAt(0)->IsRegister());
3246 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003247 __ cmp(reg, instr->hydrogen()->target());
3248 DeoptimizeIf(not_equal, instr->environment());
3249}
3250
3251
3252void LCodeGen::DoCheckMap(LCheckMap* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003253 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003254 ASSERT(input->IsRegister());
3255 Register reg = ToRegister(input);
3256 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3257 instr->hydrogen()->map());
3258 DeoptimizeIf(not_equal, instr->environment());
3259}
3260
3261
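// Objects in new space may be moved by the garbage collector, so they are
// not embedded directly in generated code; instead the code loads them
// through a JSGlobalPropertyCell whose value the GC keeps up to date.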
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003262void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
3263 if (Heap::InNewSpace(*object)) {
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003264 Handle<JSGlobalPropertyCell> cell =
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003265 Factory::NewJSGlobalPropertyCell(object);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003266 __ mov(result, Operand::Cell(cell));
3267 } else {
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003268 __ mov(result, object);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003269 }
3270}
3271
3272
3273void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003274 Register reg = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003275
3276 Handle<JSObject> holder = instr->holder();
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003277 Handle<JSObject> current_prototype = instr->prototype();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003278
3279 // Load prototype object.
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003280 LoadHeapObject(reg, current_prototype);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003281
3282 // Check prototype maps up to the holder.
3283 while (!current_prototype.is_identical_to(holder)) {
3284 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3285 Handle<Map>(current_prototype->map()));
3286 DeoptimizeIf(not_equal, instr->environment());
3287 current_prototype =
3288 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3289 // Load next prototype object.
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003290 LoadHeapObject(reg, current_prototype);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003291 }
3292
3293 // Check the holder map.
3294 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3295 Handle<Map>(current_prototype->map()));
3296 DeoptimizeIf(not_equal, instr->environment());
3297}
3298
3299
3300void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
3301  // Set up the parameters to the stub/runtime call.
3302 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3303 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3304 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3305 __ push(Immediate(instr->hydrogen()->constant_elements()));
3306
3307 // Pick the right runtime function or stub to call.
3308 int length = instr->hydrogen()->length();
3309 if (instr->hydrogen()->IsCopyOnWrite()) {
3310 ASSERT(instr->hydrogen()->depth() == 1);
3311 FastCloneShallowArrayStub::Mode mode =
3312 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3313 FastCloneShallowArrayStub stub(mode, length);
3314 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3315 } else if (instr->hydrogen()->depth() > 1) {
3316 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
3317 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3318 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
3319 } else {
3320 FastCloneShallowArrayStub::Mode mode =
3321 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3322 FastCloneShallowArrayStub stub(mode, length);
3323 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3324 }
3325}
3326
3327
3328void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3329  // Set up the parameters to the stub/runtime call.
3330 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3331 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3332 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3333 __ push(Immediate(instr->hydrogen()->constant_properties()));
3334 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3335
lrn@chromium.org5d00b602011-01-05 09:51:43 +00003336 // Pick the right runtime function to call.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003337 if (instr->hydrogen()->depth() > 1) {
3338 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3339 } else {
3340 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3341 }
3342}
3343
3344
3345void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3346 NearLabel materialized;
3347 // Registers will be used as follows:
3348 // edi = JS function.
3349 // ecx = literals array.
3350 // ebx = regexp literal.
3351 // eax = regexp literal clone.
3352 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3353 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3354 int literal_offset = FixedArray::kHeaderSize +
3355 instr->hydrogen()->literal_index() * kPointerSize;
3356 __ mov(ebx, FieldOperand(ecx, literal_offset));
3357 __ cmp(ebx, Factory::undefined_value());
3358 __ j(not_equal, &materialized);
3359
3360  // Create the regexp literal using the runtime function.
3361 // Result will be in eax.
3362 __ push(ecx);
3363 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3364 __ push(Immediate(instr->hydrogen()->pattern()));
3365 __ push(Immediate(instr->hydrogen()->flags()));
3366 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3367 __ mov(ebx, eax);
3368
3369 __ bind(&materialized);
3370 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3371 Label allocated, runtime_allocate;
3372 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3373 __ jmp(&allocated);
3374
3375 __ bind(&runtime_allocate);
3376 __ push(ebx);
3377 __ push(Immediate(Smi::FromInt(size)));
3378 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3379 __ pop(ebx);
3380
3381 __ bind(&allocated);
3382 // Copy the content into the newly allocated memory.
3383 // (Unroll copy loop once for better throughput).
3384 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3385 __ mov(edx, FieldOperand(ebx, i));
3386 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3387 __ mov(FieldOperand(eax, i), edx);
3388 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3389 }
3390 if ((size % (2 * kPointerSize)) != 0) {
3391 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3392 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3393 }
3394}
3395
3396
3397void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3398 // Use the fast case closure allocation code that allocates in new
3399 // space for nested functions that don't need literals cloning.
3400 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3401  bool pretenure = instr->hydrogen()->pretenure();
3402 if (shared_info->num_literals() == 0 && !pretenure) {
3403 FastNewClosureStub stub;
3404 __ push(Immediate(shared_info));
3405 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3406 } else {
3407 __ push(esi);
3408 __ push(Immediate(shared_info));
3409 __ push(Immediate(pretenure
3410 ? Factory::true_value()
3411 : Factory::false_value()));
3412 CallRuntime(Runtime::kNewClosure, 3, instr);
3413 }
3414}
3415
3416
3417void LCodeGen::DoTypeof(LTypeof* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003418 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003419 if (input->IsConstantOperand()) {
3420 __ push(ToImmediate(input));
3421 } else {
3422 __ push(ToOperand(input));
3423 }
3424 CallRuntime(Runtime::kTypeof, 1, instr);
3425}
3426
3427
3428void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003429 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003430 Register result = ToRegister(instr->result());
3431 Label true_label;
3432 Label false_label;
3433 NearLabel done;
3434
3435 Condition final_branch_condition = EmitTypeofIs(&true_label,
3436 &false_label,
3437 input,
3438 instr->type_literal());
3439 __ j(final_branch_condition, &true_label);
3440 __ bind(&false_label);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003441 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003442 __ jmp(&done);
3443
3444 __ bind(&true_label);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003445 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003446
3447 __ bind(&done);
3448}
3449
3450
3451void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003452 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003453 int true_block = chunk_->LookupDestination(instr->true_block_id());
3454 int false_block = chunk_->LookupDestination(instr->false_block_id());
3455 Label* true_label = chunk_->GetAssemblyLabel(true_block);
3456 Label* false_label = chunk_->GetAssemblyLabel(false_block);
3457
3458 Condition final_branch_condition = EmitTypeofIs(true_label,
3459 false_label,
3460 input,
3461 instr->type_literal());
3462
3463 EmitBranch(true_block, false_block, final_branch_condition);
3464}
3465
3466
3467Condition LCodeGen::EmitTypeofIs(Label* true_label,
3468 Label* false_label,
3469 Register input,
3470 Handle<String> type_name) {
3471 Condition final_branch_condition = no_condition;
3472 if (type_name->Equals(Heap::number_symbol())) {
3473 __ test(input, Immediate(kSmiTagMask));
3474 __ j(zero, true_label);
3475 __ cmp(FieldOperand(input, HeapObject::kMapOffset),
3476 Factory::heap_number_map());
3477 final_branch_condition = equal;
3478
3479 } else if (type_name->Equals(Heap::string_symbol())) {
3480 __ test(input, Immediate(kSmiTagMask));
3481 __ j(zero, false_label);
3482 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3483 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3484 1 << Map::kIsUndetectable);
3485 __ j(not_zero, false_label);
3486 __ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
3487 final_branch_condition = below;
3488
3489 } else if (type_name->Equals(Heap::boolean_symbol())) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003490 __ cmp(input, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003491 __ j(equal, true_label);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003492 __ cmp(input, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003493 final_branch_condition = equal;
3494
3495 } else if (type_name->Equals(Heap::undefined_symbol())) {
3496 __ cmp(input, Factory::undefined_value());
3497 __ j(equal, true_label);
3498 __ test(input, Immediate(kSmiTagMask));
3499 __ j(zero, false_label);
3500 // Check for undetectable objects => true.
3501 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3502 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3503 1 << Map::kIsUndetectable);
3504 final_branch_condition = not_zero;
3505
3506 } else if (type_name->Equals(Heap::function_symbol())) {
3507 __ test(input, Immediate(kSmiTagMask));
3508 __ j(zero, false_label);
3509 __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
3510 __ j(equal, true_label);
3511 // Regular expressions => 'function' (they are callable).
3512 __ CmpInstanceType(input, JS_REGEXP_TYPE);
3513 final_branch_condition = equal;
3514
3515 } else if (type_name->Equals(Heap::object_symbol())) {
3516 __ test(input, Immediate(kSmiTagMask));
3517 __ j(zero, false_label);
3518 __ cmp(input, Factory::null_value());
3519 __ j(equal, true_label);
3520 // Regular expressions => 'function', not 'object'.
3521 __ CmpObjectType(input, JS_REGEXP_TYPE, input);
3522 __ j(equal, false_label);
3523 // Check for undetectable objects => false.
3524 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3525 1 << Map::kIsUndetectable);
3526 __ j(not_zero, false_label);
3527 // Check for JS objects => true.
3528 __ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
3529 __ j(below, false_label);
3530 __ CmpInstanceType(input, LAST_JS_OBJECT_TYPE);
3531 final_branch_condition = below_equal;
3532
3533 } else {
3534 final_branch_condition = not_equal;
3535 __ jmp(false_label);
3536 // A dead branch instruction will be generated after this point.
3537 }
3538
3539 return final_branch_condition;
3540}
3541
3542
3543void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
3544 // No code for lazy bailout instruction. Used to capture environment after a
3545 // call for populating the safepoint data with deoptimization data.
3546}
3547
3548
3549void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
3550 DeoptimizeIf(no_condition, instr->environment());
3551}
3552
3553
3554void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
3555 LOperand* obj = instr->object();
3556 LOperand* key = instr->key();
3557 __ push(ToOperand(obj));
3558 if (key->IsConstantOperand()) {
3559 __ push(ToImmediate(key));
3560 } else {
3561 __ push(ToOperand(key));
3562 }
3563 RecordPosition(instr->pointer_map()->position());
3564 SafepointGenerator safepoint_generator(this,
3565 instr->pointer_map(),
3566 Safepoint::kNoDeoptimizationIndex);
3567 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
3568}
3569
3570
3571void LCodeGen::DoStackCheck(LStackCheck* instr) {
3572 // Perform stack overflow check.
3573 NearLabel done;
3574 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
3575 __ cmp(esp, Operand::StaticVariable(stack_limit));
3576 __ j(above_equal, &done);
3577
3578 StackCheckStub stub;
3579 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3580 __ bind(&done);
3581}
3582
3583
3584void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
3585 // This is a pseudo-instruction that ensures that the environment here is
3586 // properly registered for deoptimization and records the assembler's PC
3587 // offset.
3588 LEnvironment* environment = instr->environment();
3589 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
3590 instr->SpilledDoubleRegisterArray());
3591
3592 // If the environment were already registered, we would have no way of
3593 // backpatching it with the spill slot operands.
3594 ASSERT(!environment->HasBeenRegistered());
3595 RegisterEnvironmentForDeoptimization(environment);
3596 ASSERT(osr_pc_offset_ == -1);
3597 osr_pc_offset_ = masm()->pc_offset();
3598}
3599
3600
3601#undef __
3602
3603} } // namespace v8::internal
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003604
3605#endif // V8_TARGET_ARCH_IA32