// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "ia32/lithium-codegen-ia32.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope(SSE2);
  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


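// GeneratePrologue() builds the standard frame for optimized code. After it
// runs, the frame looks roughly like this (higher addresses first,
// kPointerSize == 4 on ia32; illustrative sketch only):
//
//   ebp +  4 : return address
//   ebp +  0 : caller's ebp
//   ebp -  4 : context (esi)
//   ebp -  8 : JS function (edi)
//   ebp - 12 : first of StackSlotCount() spill slots
//
// With --debug-code the spill slots are filled with kSlotsZapValue so that
// reads of uninitialized slots are easy to spot in the debugger.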
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(Operand(eax), Immediate(slots));
      Label loop;
      __ bind(&loop);
      __ push(Immediate(kSlotsZapValue));
      __ dec(eax);
      __ j(not_zero, &loop);
    } else {
      __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ mov(Operand(esp, offset), eax);
      }
#endif
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}


LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
         value->Number());
  return static_cast<int32_t>(value->Number());
}


Immediate LCodeGen::ToImmediate(LOperand* op) {
  LConstantOperand* const_op = LConstantOperand::cast(op);
  Handle<Object> literal = chunk_->LookupLiteral(const_op);
  Representation r = chunk_->LookupLiteralRepresentation(const_op);
  if (r.IsInteger32()) {
    ASSERT(literal->IsNumber());
    return Immediate(static_cast<int32_t>(literal->Number()));
  } else if (r.IsDouble()) {
    Abort("unsupported double immediate");
  }
  ASSERT(r.IsTagged());
  return Immediate(literal);
}


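// ToOperand() maps an allocated stack-slot index to an ebp-relative address.
// A worked example of the arithmetic below (illustrative): spill slot 0 is at
// ebp - 3 * kPointerSize = ebp - 12, just below the context and function saved
// by the prologue, while a negative index such as -1 denotes an incoming
// parameter and maps to ebp - (-1 - 1) * kPointerSize = ebp + 8, skipping the
// return address at ebp + 4.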
Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(ebp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(ebp, -(index - 1) * kPointerSize);
  }
}


Operand LCodeGen::HighOperand(LOperand* op) {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  int offset = (index >= 0) ? index + 3 : index - 1;
  return Operand(ebp, -offset * kPointerSize);
}


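// WriteTranslation() emits the deoptimization translation for a whole
// environment chain. The recursive call below writes the outer (caller)
// environments first, so frames appear outermost-first in the translation;
// within a frame there is one entry per environment value, and a value that
// also lives in a spill slot has the spilled copy recorded ahead of it,
// flagged with MarkDuplicate().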
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RegisterLazyDeoptimization(instr);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bail out to. If the call has side effects,
  // execution has to continue after the call; otherwise execution can
  // continue from a previous bailout point, repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


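// DeoptimizeIf() emits a conditional jump to a pre-built eager deoptimization
// entry. Under --deopt-every-n-times the code below additionally maintains a
// countdown in SharedFunctionInfo::kDeoptCounterOffset; in rough pseudocode
// (illustrative, not the emitted instruction sequence):
//
//   if (--shared->deopt_counter == 0) {
//     shared->deopt_counter = FLAG_deopt_every_n_times;
//     deoptimize unconditionally;
//   }
//
// EFLAGS and the scratch registers eax/ebx are saved and restored around the
// counter update so the surrounding code is not disturbed.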
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  if (FLAG_deopt_every_n_times != 0) {
    Handle<SharedFunctionInfo> shared(info_->shared_info());
    Label no_deopt;
    __ pushfd();
    __ push(eax);
    __ push(ebx);
    __ mov(ebx, shared);
    __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
    __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    __ j(not_zero, &no_deopt);
    if (FLAG_trap_on_deopt) __ int3();
    __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);

    __ bind(&no_deopt);
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ int3();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      NearLabel done;
      __ j(NegateCondition(cc), &done);
      __ int3();
      __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


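// RecordSafepoint() adds an entry for the current pc to the safepoint table.
// Pointer values in stack slots are always recorded; pointer values in
// registers are only recorded for Safepoint::kWithRegisters safepoints (the
// pushad/popad sequences used by deferred code), and esi is added there
// unconditionally because it always holds the context.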
void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register esi always contains a pointer to the context.
    safepoint.DefinePointerRegister(esi);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}


void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(eax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      StringCharAtStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::MathPow: {
      MathPowStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


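// DoModI computes an int32 modulus with idiv: the dividend is fixed in eax
// and the remainder, which is the result, ends up in edx. It deoptimizes on a
// zero divisor (kCanBeDivByZero) and, under kBailoutOnMinusZero, on a
// negative dividend with a zero remainder, since e.g. -4 % 2 in JavaScript is
// -0, which a tagged int32 cannot represent.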
void LCodeGen::DoModI(LModI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(edx));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register right_reg = ToRegister(right);

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Sign extend to edx.
  __ cdq();

  // Check for (0 % -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel positive_left;
    NearLabel done;
    __ test(eax, Operand(eax));
    __ j(not_sign, &positive_left);
    __ idiv(right_reg);

    // Test the remainder for 0, because then the result would be -0.
    __ test(edx, Operand(edx));
    __ j(not_zero, &done);

    DeoptimizeIf(no_condition, instr->environment());
    __ bind(&positive_left);
    __ idiv(right_reg);
    __ bind(&done);
  } else {
    __ idiv(right_reg);
  }
}


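// DoDivI only handles divisions whose quotient is an exact int32: after the
// idiv below, a non-zero remainder deoptimizes. Further deopt checks cover
// division by zero, 0 divided by a negative number (which would be -0), and
// kMinInt / -1, whose mathematical result 2^31 overflows int32.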
void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(eax));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register left_reg = eax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel left_not_zero;
    __ test(left_reg, Operand(left_reg));
    __ j(not_zero, &left_not_zero);
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    NearLabel left_not_min_int;
    __ cmp(left_reg, kMinInt);
    __ j(not_zero, &left_not_min_int);
    __ cmp(right_reg, -1);
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to edx.
  __ cdq();
  __ idiv(right_reg);

  // Deoptimize if remainder is not 0.
  __ test(edx, Operand(edx));
  DeoptimizeIf(not_zero, instr->environment());
}


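// DoMulI multiplies in place with imul and deoptimizes on overflow via the
// overflow flag. For kBailoutOnMinusZero the original left operand is first
// saved in a temp register: if the product is zero, the code deopts when the
// constant right operand is <= 0, or when the sign of (saved left | right) is
// negative, because e.g. -3 * 0 in JavaScript evaluates to -0.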
void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ mov(ToRegister(instr->TempAt(0)), left);
  }

  if (right->IsConstantOperand()) {
    __ imul(left, left, ToInteger32(LConstantOperand::cast(right)));
  } else {
    __ imul(left, ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    NearLabel done;
    __ test(left, Operand(left));
    __ j(not_zero, &done);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), right_operand);
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), right_operand);
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), right_operand);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


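// DoShiftI shifts in place. Variable shift counts must be in ecx (the only
// register the cl-based shift instructions accept); constant counts are
// masked to five bits, matching JavaScript shift semantics. SHR can
// deoptimize: a logical shift by zero of a value with the top bit set would
// yield an unsigned result above kMaxInt, which cannot be represented as a
// signed int32.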
void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(ecx));

    switch (instr->op()) {
      case Token::SAR:
        __ sar_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shr_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        }
        break;
      case Token::SHL:
        __ shl_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sar(ToRegister(left), shift_count);
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        } else {
          __ shr(ToRegister(left), shift_count);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shl(ToRegister(left), shift_count);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ sub(ToOperand(left), ToImmediate(right));
  } else {
    __ sub(ToRegister(left), ToOperand(right));
  }
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


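// DoConstantD materializes a double constant in an XMM register without a
// memory load. +0.0 is produced with xorpd; every other value is assembled
// from its two 32-bit halves through a temp GP register, using pinsrd when
// SSE4.1 is available and a movd/psllq/por sequence otherwise. -0.0 cannot
// take the xorpd shortcut because its bit pattern (sign bit set) is not zero.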
void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  // Use xor to produce +0.0 in a fast and compact way, but avoid doing so
  // if the constant is -0.0.
  if (BitCast<uint64_t, double>(v) == 0) {
    __ xorpd(res, res);
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    uint64_t int_val = BitCast<uint64_t, double>(v);
    int32_t lower = static_cast<int32_t>(int_val);
    int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatures::Scope scope(SSE4_1);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(res, Operand(temp));
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      } else {
        __ xorpd(res, res);
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      }
    } else {
      __ Set(temp, Immediate(upper));
      __ movd(res, Operand(temp));
      __ psllq(res, 32);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(xmm0, Operand(temp));
        __ por(res, xmm0);
      }
    }
  }
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  NearLabel done;
  // If the object is a smi return the object.
  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, &done);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
  __ j(not_equal, &done);
  __ mov(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToOperand(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ add(ToOperand(left), ToImmediate(right));
  } else {
    __ add(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  // Modulo uses a fixed result register.
  ASSERT(instr->op() == Token::MOD || left->Equals(instr->result()));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::SUB:
      __ subsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MUL:
      __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::DIV:
      __ divsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MOD: {
      // Pass two doubles as arguments on the stack.
      __ PrepareCallCFunction(4, eax);
      __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
      __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
      __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);

      // Return value is in st(0) on ia32.
      // Store it into the (fixed) result register.
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ fstp_d(Operand(esp, 0));
      __ movdbl(ToDoubleRegister(instr->result()), Operand(esp, 0));
      __ add(Operand(esp), Immediate(kDoubleSize));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(edx));
  ASSERT(ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


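// EmitBranch() lowers a condition into control flow between two blocks and
// exploits block layout: if one target is the next block to be emitted, a
// single conditional jump suffices and the adjacent block is reached by
// fall-through; otherwise a conditional jump plus an unconditional jump is
// needed, and if both targets coincide the branch degenerates to a goto.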
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ jmp(chunk_->GetAssemblyLabel(right_block));
  }
}


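// DoBranch implements a generic ToBoolean-style branch. Integer and double
// inputs are compared against zero directly. For tagged values the common
// cases are decided inline (undefined, false and Smi zero are false; true and
// non-zero smis are true; heap numbers are compared against 0.0) and every
// other object falls back to ToBooleanStub. The stub's eax result is tested
// before popad, which restores the registers without touching EFLAGS, so the
// final branch still observes the outcome.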
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ test(reg, Operand(reg));
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ cmp(reg, Factory::true_value());
      EmitBranch(true_block, false_block, equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ cmp(reg, Factory::undefined_value());
      __ j(equal, false_label);
      __ cmp(reg, Factory::true_value());
      __ j(equal, true_label);
      __ cmp(reg, Factory::false_value());
      __ j(equal, false_label);
      __ test(reg, Operand(reg));
      __ j(equal, false_label);
      __ test(reg, Immediate(kSmiTagMask));
      __ j(zero, true_label);

      // Test for double values. Zero is false.
      NearLabel call_stub;
      __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Factory::heap_number_map());
      __ j(not_equal, &call_stub);
      __ fldz();
      __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
      __ FCmp();
      __ j(zero, false_label);
      __ jmp(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      __ pushad();
      __ push(reg);
      __ CallStub(&stub);
      __ test(eax, Operand(eax));
      __ popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit();
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  __ pushad();
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  __ popad();
}


void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = equal;
      break;
    case Token::LT:
      cond = is_unsigned ? below : less;
      break;
    case Token::GT:
      cond = is_unsigned ? above : greater;
      break;
    case Token::LTE:
      cond = is_unsigned ? below_equal : less_equal;
      break;
    case Token::GTE:
      cond = is_unsigned ? above_equal : greater_equal;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  if (right->IsConstantOperand()) {
    __ cmp(ToOperand(left), ToImmediate(right));
  } else {
    __ cmp(ToRegister(left), ToOperand(right));
  }
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();

  NearLabel unordered;
  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the unordered case, which produces a false value.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, &unordered, not_taken);
  } else {
    EmitCmpI(left, right);
  }

  NearLabel done;
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ mov(ToRegister(result), Factory::true_value());
  __ j(cc, &done);

  __ bind(&unordered);
  __ mov(ToRegister(result), Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the false block.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ mov(result, Factory::true_value());
  NearLabel done;
  __ j(equal, &done);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Materialize false.

  __ cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    __ mov(result, Factory::true_value());
    NearLabel done;
    __ j(equal, &done);
    __ mov(result, Factory::false_value());
    __ bind(&done);
  } else {
    NearLabel true_value, false_value, done;
    __ j(equal, &true_value);
    __ cmp(reg, Factory::undefined_value());
    __ j(equal, &true_value);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &true_value);
    __ bind(&false_value);
    __ mov(result, Factory::false_value());
    __ jmp(&done);
    __ bind(&true_value);
    __ mov(result, Factory::true_value());
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, equal);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ j(equal, true_label);
    __ cmp(reg, Factory::undefined_value());
    __ j(equal, true_label);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->TempAt(0));
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, not_zero);
  }
}


Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  ASSERT(!input.is(temp1));
  ASSERT(!input.is(temp2));
  ASSERT(!temp1.is(temp2));

  __ test(input, Immediate(kSmiTagMask));
  __ j(equal, is_not_object);

  __ cmp(input, Factory::null_value());
  __ j(equal, is_object);

  __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
  __ test(temp2, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, is_not_object);

  __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
  __ j(below, is_not_object);
  __ cmp(temp2, LAST_JS_OBJECT_TYPE);
  return below_equal;
}


void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = ToRegister(instr->TempAt(0));
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ j(true_cond, &is_true);

  __ bind(&is_false);
  __ mov(result, Factory::false_value());
  __ jmp(&done);

  __ bind(&is_true);
  __ mov(result, Factory::true_value());

  __ bind(&done);
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


kasperl@chromium.orga5551262010-12-07 12:49:48 +00001477void LCodeGen::DoIsSmi(LIsSmi* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001478 Operand input = ToOperand(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001479 Register result = ToRegister(instr->result());
1480
1481 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1482 __ test(input, Immediate(kSmiTagMask));
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001483 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001484 NearLabel done;
1485 __ j(zero, &done);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001486 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001487 __ bind(&done);
1488}
1489
1490
1491void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001492 Operand input = ToOperand(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001493
1494 int true_block = chunk_->LookupDestination(instr->true_block_id());
1495 int false_block = chunk_->LookupDestination(instr->false_block_id());
1496
1497 __ test(input, Immediate(kSmiTagMask));
1498 EmitBranch(true_block, false_block, zero);
1499}
1500
1501
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001502static InstanceType TestType(HHasInstanceType* instr) {
1503 InstanceType from = instr->from();
1504 InstanceType to = instr->to();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001505 if (from == FIRST_TYPE) return to;
1506 ASSERT(from == to || to == LAST_TYPE);
1507 return from;
1508}
1509
1510
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001511static Condition BranchCondition(HHasInstanceType* instr) {
1512 InstanceType from = instr->from();
1513 InstanceType to = instr->to();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001514 if (from == to) return equal;
1515 if (to == LAST_TYPE) return above_equal;
1516 if (from == FIRST_TYPE) return below_equal;
1517 UNREACHABLE();
1518 return equal;
1519}
1520
1521
1522void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001523 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001524 Register result = ToRegister(instr->result());
1525
1526 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1527 __ test(input, Immediate(kSmiTagMask));
1528 NearLabel done, is_false;
1529 __ j(zero, &is_false);
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001530 __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1531 __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001532 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001533 __ jmp(&done);
1534 __ bind(&is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001535 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001536 __ bind(&done);
1537}
1538
1539
1540void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001541 Register input = ToRegister(instr->InputAt(0));
1542 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001543
1544 int true_block = chunk_->LookupDestination(instr->true_block_id());
1545 int false_block = chunk_->LookupDestination(instr->false_block_id());
1546
1547 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1548
1549 __ test(input, Immediate(kSmiTagMask));
1550 __ j(zero, false_label);
1551
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001552 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1553 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001554}
1555
1556
1557void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001558 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001559 Register result = ToRegister(instr->result());
1560
1561 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001562 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001563 __ test(FieldOperand(input, String::kHashFieldOffset),
1564 Immediate(String::kContainsCachedArrayIndexMask));
1565 NearLabel done;
1566 __ j(not_zero, &done);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001567 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001568 __ bind(&done);
1569}
1570
1571
1572void LCodeGen::DoHasCachedArrayIndexAndBranch(
1573 LHasCachedArrayIndexAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001574 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001575
1576 int true_block = chunk_->LookupDestination(instr->true_block_id());
1577 int false_block = chunk_->LookupDestination(instr->false_block_id());
1578
1579 __ test(FieldOperand(input, String::kHashFieldOffset),
1580 Immediate(String::kContainsCachedArrayIndexMask));
1581 EmitBranch(true_block, false_block, not_equal);
1582}
1583
1584
1585// Branches to a label or falls through with the answer in the z flag. Trashes
1586// the temp registers, but not the input. Only input and temp2 may alias.
1587void LCodeGen::EmitClassOfTest(Label* is_true,
1588 Label* is_false,
 1589 Handle<String> class_name,
1590 Register input,
1591 Register temp,
1592 Register temp2) {
1593 ASSERT(!input.is(temp));
1594 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1595 __ test(input, Immediate(kSmiTagMask));
1596 __ j(zero, is_false);
1597 __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
1598 __ j(below, is_false);
1599
1600 // Map is now in temp.
1601 // Functions have class 'Function'.
1602 __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
1603 if (class_name->IsEqualTo(CStrVector("Function"))) {
1604 __ j(equal, is_true);
1605 } else {
1606 __ j(equal, is_false);
1607 }
1608
1609 // Check if the constructor in the map is a function.
1610 __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
1611
1612 // As long as JS_FUNCTION_TYPE is the last instance type and it is
1613 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1614 // LAST_JS_OBJECT_TYPE.
1615 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1616 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1617
1618 // Objects with a non-function constructor have class 'Object'.
1619 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1620 if (class_name->IsEqualTo(CStrVector("Object"))) {
1621 __ j(not_equal, is_true);
1622 } else {
1623 __ j(not_equal, is_false);
1624 }
1625
1626 // temp now contains the constructor function. Grab the
1627 // instance class name from there.
1628 __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1629 __ mov(temp, FieldOperand(temp,
1630 SharedFunctionInfo::kInstanceClassNameOffset));
1631 // The class name we are testing against is a symbol because it's a literal.
1632 // The name in the constructor is a symbol because of the way the context is
1633 // booted. This routine isn't expected to work for random API-created
1634 // classes and it doesn't have to because you can't access it with natives
1635 // syntax. Since both sides are symbols it is sufficient to use an identity
1636 // comparison.
1637 __ cmp(temp, class_name);
1638 // End with the answer in the z flag.
1639}
1640
1641
1642void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001643 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001644 Register result = ToRegister(instr->result());
1645 ASSERT(input.is(result));
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001646 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001647 Handle<String> class_name = instr->hydrogen()->class_name();
1648 NearLabel done;
1649 Label is_true, is_false;
1650
1651 EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);
1652
1653 __ j(not_equal, &is_false);
1654
1655 __ bind(&is_true);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001656 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001657 __ jmp(&done);
1658
1659 __ bind(&is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001660 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001661 __ bind(&done);
1662}
1663
1664
1665void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001666 Register input = ToRegister(instr->InputAt(0));
1667 Register temp = ToRegister(instr->TempAt(0));
1668 Register temp2 = ToRegister(instr->TempAt(1));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001669 if (input.is(temp)) {
1670 // Swap.
1671 Register swapper = temp;
1672 temp = temp2;
1673 temp2 = swapper;
1674 }
1675 Handle<String> class_name = instr->hydrogen()->class_name();
1676
1677 int true_block = chunk_->LookupDestination(instr->true_block_id());
1678 int false_block = chunk_->LookupDestination(instr->false_block_id());
1679
1680 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1681 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1682
1683 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1684
1685 EmitBranch(true_block, false_block, equal);
1686}
1687
1688
1689void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001690 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001691 int true_block = instr->true_block_id();
1692 int false_block = instr->false_block_id();
1693
1694 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1695 EmitBranch(true_block, false_block, equal);
1696}
1697
1698
1699void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001700 // Object and function are in fixed registers defined by the stub.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001701 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001702 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1703
1704 NearLabel true_value, done;
1705 __ test(eax, Operand(eax));
1706 __ j(zero, &true_value);
1707 __ mov(ToRegister(instr->result()), Factory::false_value());
1708 __ jmp(&done);
1709 __ bind(&true_value);
1710 __ mov(ToRegister(instr->result()), Factory::true_value());
1711 __ bind(&done);
1712}
1713
1714
1715void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1716 int true_block = chunk_->LookupDestination(instr->true_block_id());
1717 int false_block = chunk_->LookupDestination(instr->false_block_id());
1718
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001719 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001720 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1721 __ test(eax, Operand(eax));
1722 EmitBranch(true_block, false_block, zero);
1723}
1724
1725
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001726void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1727 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1728 public:
1729 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1730 LInstanceOfKnownGlobal* instr)
1731 : LDeferredCode(codegen), instr_(instr) { }
1732 virtual void Generate() {
1733 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1734 }
1735
1736 Label* map_check() { return &map_check_; }
1737
1738 private:
1739 LInstanceOfKnownGlobal* instr_;
1740 Label map_check_;
1741 };
1742
1743 DeferredInstanceOfKnownGlobal* deferred;
1744 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1745
1746 Label done, false_result;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001747 Register object = ToRegister(instr->InputAt(0));
1748 Register temp = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001749
 1750 // A Smi is not an instance of anything.
1751 __ test(object, Immediate(kSmiTagMask));
1752 __ j(zero, &false_result, not_taken);
1753
 1754 // This is the inlined call site instanceof cache. The two occurrences of the
1755 // hole value will be patched to the last map/result pair generated by the
1756 // instanceof stub.
1757 NearLabel cache_miss;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001758 Register map = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001759 __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
1760 __ bind(deferred->map_check()); // Label for calculating code patching.
1761 __ cmp(map, Factory::the_hole_value()); // Patched to cached map.
1762 __ j(not_equal, &cache_miss, not_taken);
1763 __ mov(eax, Factory::the_hole_value()); // Patched to either true or false.
1764 __ jmp(&done);
1765
1766 // The inlined call site cache did not match. Check null and string before
1767 // calling the deferred code.
1768 __ bind(&cache_miss);
 1769 // Null is not an instance of anything.
1770 __ cmp(object, Factory::null_value());
1771 __ j(equal, &false_result);
1772
1773 // String values are not instances of anything.
1774 Condition is_string = masm_->IsObjectStringType(object, temp, temp);
1775 __ j(is_string, &false_result);
1776
1777 // Go to the deferred code.
1778 __ jmp(deferred->entry());
1779
1780 __ bind(&false_result);
1781 __ mov(ToRegister(instr->result()), Factory::false_value());
1782
1783 // Here result has either true or false. Deferred code also produces true or
1784 // false object.
1785 __ bind(deferred->exit());
1786 __ bind(&done);
1787}
1788
1789
1790void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1791 Label* map_check) {
1792 __ PushSafepointRegisters();
1793
1794 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1795 flags = static_cast<InstanceofStub::Flags>(
1796 flags | InstanceofStub::kArgsInRegisters);
1797 flags = static_cast<InstanceofStub::Flags>(
1798 flags | InstanceofStub::kCallSiteInlineCheck);
1799 flags = static_cast<InstanceofStub::Flags>(
1800 flags | InstanceofStub::kReturnTrueFalseObject);
1801 InstanceofStub stub(flags);
1802
 1803 // Get the temp register reserved by the instruction. This needs to be edi
 1804 // because its slot in the pushed safepoint register area is used to
 1805 // communicate the offset to the location of the map check.
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001806 Register temp = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001807 ASSERT(temp.is(edi));
1808 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1809 static const int kAdditionalDelta = 13;
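  // The value stored below is the distance from the inlined map check to the
  // return address of the stub call; the stub uses it to locate and patch the
  // check. kAdditionalDelta covers the code between the label below and the
  // call, as asserted after the call.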
1810 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1811 Label before_push_delta;
1812 __ bind(&before_push_delta);
1813 __ mov(temp, Immediate(delta));
1814 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
1815 __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
1816 ASSERT_EQ(kAdditionalDelta,
1817 masm_->SizeOfCodeGeneratedSince(&before_push_delta));
1818 RecordSafepointWithRegisters(
1819 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1820 // Put the result value into the eax slot and restore all registers.
1821 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax);
1822
1823 __ PopSafepointRegisters();
1824}
1825
1826
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001827static Condition ComputeCompareCondition(Token::Value op) {
1828 switch (op) {
1829 case Token::EQ_STRICT:
1830 case Token::EQ:
1831 return equal;
1832 case Token::LT:
1833 return less;
1834 case Token::GT:
1835 return greater;
1836 case Token::LTE:
1837 return less_equal;
1838 case Token::GTE:
1839 return greater_equal;
1840 default:
1841 UNREACHABLE();
1842 return no_condition;
1843 }
1844}
1845
1846
1847void LCodeGen::DoCmpT(LCmpT* instr) {
1848 Token::Value op = instr->op();
1849
1850 Handle<Code> ic = CompareIC::GetUninitialized(op);
1851 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1852
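  // The compare stub expects compare condition and the input operands
  // reversed for GT and LTE.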
1853 Condition condition = ComputeCompareCondition(op);
1854 if (op == Token::GT || op == Token::LTE) {
1855 condition = ReverseCondition(condition);
1856 }
1857 NearLabel true_value, done;
1858 __ test(eax, Operand(eax));
1859 __ j(condition, &true_value);
1860 __ mov(ToRegister(instr->result()), Factory::false_value());
1861 __ jmp(&done);
1862 __ bind(&true_value);
1863 __ mov(ToRegister(instr->result()), Factory::true_value());
1864 __ bind(&done);
1865}
1866
1867
1868void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1869 Token::Value op = instr->op();
1870 int true_block = chunk_->LookupDestination(instr->true_block_id());
1871 int false_block = chunk_->LookupDestination(instr->false_block_id());
1872
1873 Handle<Code> ic = CompareIC::GetUninitialized(op);
1874 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1875
1876 // The compare stub expects compare condition and the input operands
1877 // reversed for GT and LTE.
1878 Condition condition = ComputeCompareCondition(op);
1879 if (op == Token::GT || op == Token::LTE) {
1880 condition = ReverseCondition(condition);
1881 }
1882 __ test(eax, Operand(eax));
1883 EmitBranch(true_block, false_block, condition);
1884}
1885
1886
1887void LCodeGen::DoReturn(LReturn* instr) {
1888 if (FLAG_trace) {
1889 // Preserve the return value on the stack and rely on the runtime
1890 // call to return the value in the same register.
1891 __ push(eax);
1892 __ CallRuntime(Runtime::kTraceExit, 1);
1893 }
1894 __ mov(esp, ebp);
1895 __ pop(ebp);
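  // Drop the parameters and the receiver off the stack; ecx serves only as a
  // scratch register for the return address here.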
erik.corry@gmail.comd91075f2011-02-10 07:45:38 +00001896 __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001897}
1898
1899
1900void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
1901 Register result = ToRegister(instr->result());
1902 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
1903 if (instr->hydrogen()->check_hole_value()) {
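    // A hole in the cell means the global property has been deleted; bail out
    // to the runtime by deoptimizing.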
1904 __ cmp(result, Factory::the_hole_value());
1905 DeoptimizeIf(equal, instr->environment());
1906 }
1907}
1908
1909
1910void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001911 Register value = ToRegister(instr->InputAt(0));
ager@chromium.org378b34e2011-01-28 08:04:38 +00001912 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
1913
1914 // If the cell we are storing to contains the hole it could have
1915 // been deleted from the property dictionary. In that case, we need
1916 // to update the property details in the property dictionary to mark
1917 // it as no longer deleted. We deoptimize in that case.
1918 if (instr->hydrogen()->check_hole_value()) {
1919 __ cmp(cell_operand, Factory::the_hole_value());
1920 DeoptimizeIf(equal, instr->environment());
1921 }
1922
1923 // Store the value.
1924 __ mov(cell_operand, value);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001925}
1926
1927
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001928void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
ricow@chromium.org83aa5492011-02-07 12:42:56 +00001929 Register context = ToRegister(instr->context());
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001930 Register result = ToRegister(instr->result());
ricow@chromium.org83aa5492011-02-07 12:42:56 +00001931 __ mov(result, ContextOperand(context, instr->slot_index()));
1932}
1933
1934
1935void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
1936 Register context = ToRegister(instr->context());
1937 Register value = ToRegister(instr->value());
1938 __ mov(ContextOperand(context, instr->slot_index()), value);
1939 if (instr->needs_write_barrier()) {
1940 Register temp = ToRegister(instr->TempAt(0));
1941 int offset = Context::SlotOffset(instr->slot_index());
1942 __ RecordWrite(context, offset, value, temp);
1943 }
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001944}
1945
1946
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001947void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001948 Register object = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001949 Register result = ToRegister(instr->result());
1950 if (instr->hydrogen()->is_in_object()) {
1951 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
1952 } else {
1953 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
1954 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
1955 }
1956}
1957
1958
1959void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
1960 ASSERT(ToRegister(instr->object()).is(eax));
1961 ASSERT(ToRegister(instr->result()).is(eax));
1962
1963 __ mov(ecx, instr->name());
1964 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1965 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1966}
1967
1968
fschneider@chromium.org9e3e0b62011-01-03 10:16:46 +00001969void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
1970 Register function = ToRegister(instr->function());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001971 Register temp = ToRegister(instr->TempAt(0));
fschneider@chromium.org9e3e0b62011-01-03 10:16:46 +00001972 Register result = ToRegister(instr->result());
1973
1974 // Check that the function really is a function.
1975 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
1976 DeoptimizeIf(not_equal, instr->environment());
1977
1978 // Check whether the function has an instance prototype.
1979 NearLabel non_instance;
1980 __ test_b(FieldOperand(result, Map::kBitFieldOffset),
1981 1 << Map::kHasNonInstancePrototype);
1982 __ j(not_zero, &non_instance);
1983
1984 // Get the prototype or initial map from the function.
1985 __ mov(result,
1986 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1987
1988 // Check that the function has a prototype or an initial map.
1989 __ cmp(Operand(result), Immediate(Factory::the_hole_value()));
1990 DeoptimizeIf(equal, instr->environment());
1991
1992 // If the function does not have an initial map, we're done.
1993 NearLabel done;
1994 __ CmpObjectType(result, MAP_TYPE, temp);
1995 __ j(not_equal, &done);
1996
1997 // Get the prototype from the initial map.
1998 __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
1999 __ jmp(&done);
2000
2001 // Non-instance prototype: Fetch prototype from constructor field
2002 // in the function's map.
2003 __ bind(&non_instance);
2004 __ mov(result, FieldOperand(result, Map::kConstructorOffset));
2005
2006 // All done.
2007 __ bind(&done);
2008}
2009
2010
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002011void LCodeGen::DoLoadElements(LLoadElements* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002012 ASSERT(instr->result()->Equals(instr->InputAt(0)));
2013 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002014 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset));
2015 if (FLAG_debug_code) {
2016 NearLabel done;
2017 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
2018 Immediate(Factory::fixed_array_map()));
2019 __ j(equal, &done);
2020 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
2021 Immediate(Factory::fixed_cow_array_map()));
2022 __ Check(equal, "Check for fast elements failed.");
2023 __ bind(&done);
2024 }
2025}
2026
2027
2028void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2029 Register arguments = ToRegister(instr->arguments());
2030 Register length = ToRegister(instr->length());
2031 Operand index = ToOperand(instr->index());
2032 Register result = ToRegister(instr->result());
2033
2034 __ sub(length, index);
2035 DeoptimizeIf(below_equal, instr->environment());
2036
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002037 // There are two words between the frame pointer and the last argument.
 2038 // Subtracting index from length accounts for one of them; add one more.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002039 __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2040}
2041
2042
2043void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2044 Register elements = ToRegister(instr->elements());
2045 Register key = ToRegister(instr->key());
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00002046 Register result = ToRegister(instr->result());
2047 ASSERT(result.is(elements));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002048
2049 // Load the result.
erik.corry@gmail.comd91075f2011-02-10 07:45:38 +00002050 __ mov(result, FieldOperand(elements,
2051 key,
2052 times_pointer_size,
2053 FixedArray::kHeaderSize));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002054
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00002055 // Check for the hole value.
2056 __ cmp(result, Factory::the_hole_value());
2057 DeoptimizeIf(equal, instr->environment());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002058}
2059
2060
2061void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2062 ASSERT(ToRegister(instr->object()).is(edx));
2063 ASSERT(ToRegister(instr->key()).is(eax));
2064
2065 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2066 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2067}
2068
2069
2070void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2071 Register result = ToRegister(instr->result());
2072
2073 // Check for arguments adapter frame.
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002074 NearLabel done, adapted;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002075 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2076 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
2077 __ cmp(Operand(result),
2078 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2079 __ j(equal, &adapted);
2080
2081 // No arguments adaptor frame.
2082 __ mov(result, Operand(ebp));
2083 __ jmp(&done);
2084
2085 // Arguments adaptor frame present.
2086 __ bind(&adapted);
2087 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2088
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002089 // Result is the frame pointer for the frame if not adapted and for the real
2090 // frame below the adaptor frame if adapted.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002091 __ bind(&done);
2092}
2093
2094
2095void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002096 Operand elem = ToOperand(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002097 Register result = ToRegister(instr->result());
2098
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002099 NearLabel done;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002100
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002101 // If there is no arguments adaptor frame, the number of arguments is fixed.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002102 __ cmp(ebp, elem);
2103 __ mov(result, Immediate(scope()->num_parameters()));
2104 __ j(equal, &done);
2105
2106 // Arguments adaptor frame present. Get argument length from there.
2107 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2108 __ mov(result, Operand(result,
2109 ArgumentsAdaptorFrameConstants::kLengthOffset));
2110 __ SmiUntag(result);
2111
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002112 // Argument length is in result register.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002113 __ bind(&done);
2114}
2115
2116
2117void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2118 Register receiver = ToRegister(instr->receiver());
2119 ASSERT(ToRegister(instr->function()).is(edi));
2120 ASSERT(ToRegister(instr->result()).is(eax));
2121
2122 // If the receiver is null or undefined, we have to pass the
2123 // global object as a receiver.
2124 NearLabel global_receiver, receiver_ok;
2125 __ cmp(receiver, Factory::null_value());
2126 __ j(equal, &global_receiver);
2127 __ cmp(receiver, Factory::undefined_value());
2128 __ j(not_equal, &receiver_ok);
2129 __ bind(&global_receiver);
2130 __ mov(receiver, GlobalObjectOperand());
2131 __ bind(&receiver_ok);
2132
2133 Register length = ToRegister(instr->length());
2134 Register elements = ToRegister(instr->elements());
2135
2136 Label invoke;
2137
2138 // Copy the arguments to this function possibly from the
2139 // adaptor frame below it.
2140 const uint32_t kArgumentsLimit = 1 * KB;
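  // Deoptimize if there are too many arguments to push one by one onto the
  // stack in the loop below.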
2141 __ cmp(length, kArgumentsLimit);
2142 DeoptimizeIf(above, instr->environment());
2143
2144 __ push(receiver);
2145 __ mov(receiver, length);
2146
2147 // Loop through the arguments pushing them onto the execution
2148 // stack.
2149 Label loop;
2150 // length is a small non-negative integer, due to the test above.
2151 __ test(length, Operand(length));
2152 __ j(zero, &invoke);
2153 __ bind(&loop);
2154 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2155 __ dec(length);
2156 __ j(not_zero, &loop);
2157
2158 // Invoke the function.
2159 __ bind(&invoke);
kmillikin@chromium.org31b12772011-02-02 16:08:26 +00002160 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2161 LPointerMap* pointers = instr->pointer_map();
2162 LEnvironment* env = instr->deoptimization_environment();
2163 RecordPosition(pointers->position());
2164 RegisterEnvironmentForDeoptimization(env);
2165 SafepointGenerator safepoint_generator(this,
2166 pointers,
2167 env->deoptimization_index());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002168 ASSERT(receiver.is(eax));
2169 v8::internal::ParameterCount actual(eax);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002170 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator);
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002171
2172 // Restore context.
2173 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002174}
2175
2176
2177void LCodeGen::DoPushArgument(LPushArgument* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002178 LOperand* argument = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002179 if (argument->IsConstantOperand()) {
2180 __ push(ToImmediate(argument));
2181 } else {
2182 __ push(ToOperand(argument));
2183 }
2184}
2185
2186
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002187void LCodeGen::DoContext(LContext* instr) {
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002188 Register result = ToRegister(instr->result());
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002189 __ mov(result, esi);
2190}
2191
2192
2193void LCodeGen::DoOuterContext(LOuterContext* instr) {
2194 Register context = ToRegister(instr->context());
2195 Register result = ToRegister(instr->result());
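  // The outer context is the context of the closure stored in this context's
  // CLOSURE_INDEX slot.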
2196 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2197 __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2198}
2199
2200
2201void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2202 Register context = ToRegister(instr->context());
2203 Register result = ToRegister(instr->result());
2204 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002205}
2206
2207
2208void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002209 Register global = ToRegister(instr->global());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002210 Register result = ToRegister(instr->result());
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002211 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002212}
2213
2214
2215void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2216 int arity,
2217 LInstruction* instr) {
2218 // Change context if needed.
2219 bool change_context =
2220 (graph()->info()->closure()->context() != function->context()) ||
2221 scope()->contains_with() ||
2222 (scope()->num_heap_slots() > 0);
2223 if (change_context) {
2224 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2225 }
2226
2227 // Set eax to arguments count if adaption is not needed. Assumes that eax
2228 // is available to write to at this point.
2229 if (!function->NeedsArgumentsAdaption()) {
2230 __ mov(eax, arity);
2231 }
2232
2233 LPointerMap* pointers = instr->pointer_map();
2234 RecordPosition(pointers->position());
2235
2236 // Invoke function.
2237 if (*function == *graph()->info()->closure()) {
2238 __ CallSelf();
2239 } else {
2240 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2241 }
2242
2243 // Setup deoptimization.
2244 RegisterLazyDeoptimization(instr);
2245
2246 // Restore context.
2247 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2248}
2249
2250
2251void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2252 ASSERT(ToRegister(instr->result()).is(eax));
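  // CallKnownFunction expects the function object in edi.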
2253 __ mov(edi, instr->function());
2254 CallKnownFunction(instr->function(), instr->arity(), instr);
2255}
2256
2257
2258void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002259 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002260 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2261 Factory::heap_number_map());
2262 DeoptimizeIf(not_equal, instr->environment());
2263
2264 Label done;
2265 Register tmp = input_reg.is(eax) ? ecx : eax;
2266 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
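  // tmp and tmp2 are chosen so that they do not alias input_reg or each
  // other.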
2267
2268 // Preserve the value of all registers.
2269 __ PushSafepointRegisters();
2270
2271 Label negative;
2272 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002273 // Check the sign of the argument. If the argument is positive, just
2274 // return it. We do not need to patch the stack since |input| and
2275 // |result| are the same register and |input| will be restored
2276 // unchanged by popping safepoint registers.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002277 __ test(tmp, Immediate(HeapNumber::kSignMask));
2278 __ j(not_zero, &negative);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002279 __ jmp(&done);
2280
2281 __ bind(&negative);
2282
2283 Label allocated, slow;
2284 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2285 __ jmp(&allocated);
2286
2287 // Slow case: Call the runtime system to do the number allocation.
2288 __ bind(&slow);
2289
2290 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2291 RecordSafepointWithRegisters(
2292 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2293 // Set the pointer to the new heap number in tmp.
2294 if (!tmp.is(eax)) __ mov(tmp, eax);
2295
2296 // Restore input_reg after call to runtime.
2297 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize));
2298
2299 __ bind(&allocated);
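  // Copy the input heap number into the freshly allocated one, clearing the
  // sign bit of the exponent word in the copy.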
2300 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2301 __ and_(tmp2, ~HeapNumber::kSignMask);
2302 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2303 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2304 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002305 __ mov(Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize), tmp);
2306
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002307 __ bind(&done);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002308 __ PopSafepointRegisters();
2309}
2310
2311
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002312void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2313 Register input_reg = ToRegister(instr->InputAt(0));
2314 __ test(input_reg, Operand(input_reg));
2315 Label is_positive;
2316 __ j(not_sign, &is_positive);
2317 __ neg(input_reg);
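  // Negating kMinInt leaves the value negative (it has no representable
  // absolute value), so deoptimize in that case.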
2318 __ test(input_reg, Operand(input_reg));
2319 DeoptimizeIf(negative, instr->environment());
2320 __ bind(&is_positive);
2321}
2322
2323
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002324void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2325 // Class for deferred case.
2326 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2327 public:
2328 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2329 LUnaryMathOperation* instr)
2330 : LDeferredCode(codegen), instr_(instr) { }
2331 virtual void Generate() {
2332 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2333 }
2334 private:
2335 LUnaryMathOperation* instr_;
2336 };
2337
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002338 ASSERT(instr->InputAt(0)->Equals(instr->result()));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002339 Representation r = instr->hydrogen()->value()->representation();
2340
2341 if (r.IsDouble()) {
2342 XMMRegister scratch = xmm0;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002343 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002344 __ pxor(scratch, scratch);
2345 __ subsd(scratch, input_reg);
2346 __ pand(input_reg, scratch);
2347 } else if (r.IsInteger32()) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002348 EmitIntegerMathAbs(instr);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002349 } else { // Tagged case.
2350 DeferredMathAbsTaggedHeapNumber* deferred =
2351 new DeferredMathAbsTaggedHeapNumber(this, instr);
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002352 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002353 // Smi check.
2354 __ test(input_reg, Immediate(kSmiTagMask));
2355 __ j(not_zero, deferred->entry());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002356 EmitIntegerMathAbs(instr);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002357 __ bind(deferred->exit());
2358 }
2359}
2360
2361
2362void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2363 XMMRegister xmm_scratch = xmm0;
2364 Register output_reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002365 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002366 __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2367 __ ucomisd(input_reg, xmm_scratch);
2368
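  // ucomisd does not distinguish -0 from +0, so if -0 must be preserved we
  // conservatively deoptimize for any input that is not strictly greater than
  // zero; otherwise only negative (or NaN) inputs deoptimize.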
2369 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2370 DeoptimizeIf(below_equal, instr->environment());
2371 } else {
2372 DeoptimizeIf(below, instr->environment());
2373 }
2374
2375 // Use truncating instruction (OK because input is positive).
2376 __ cvttsd2si(output_reg, Operand(input_reg));
2377
2378 // Overflow is signalled with minint.
2379 __ cmp(output_reg, 0x80000000u);
2380 DeoptimizeIf(equal, instr->environment());
2381}
2382
2383
2384void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2385 XMMRegister xmm_scratch = xmm0;
2386 Register output_reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002387 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002388
2389 // xmm_scratch = 0.5
2390 ExternalReference one_half = ExternalReference::address_of_one_half();
2391 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2392
2393 // input = input + 0.5
2394 __ addsd(input_reg, xmm_scratch);
2395
 2396 // We need to return -0 for the input range [-0.5, 0), otherwise
2397 // compute Math.floor(value + 0.5).
2398 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2399 __ ucomisd(input_reg, xmm_scratch);
2400 DeoptimizeIf(below_equal, instr->environment());
2401 } else {
2402 // If we don't need to bailout on -0, we check only bailout
2403 // on negative inputs.
2404 __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2405 __ ucomisd(input_reg, xmm_scratch);
2406 DeoptimizeIf(below, instr->environment());
2407 }
2408
2409 // Compute Math.floor(value + 0.5).
2410 // Use truncating instruction (OK because input is positive).
2411 __ cvttsd2si(output_reg, Operand(input_reg));
2412
2413 // Overflow is signalled with minint.
2414 __ cmp(output_reg, 0x80000000u);
2415 DeoptimizeIf(equal, instr->environment());
2416}
2417
2418
2419void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002420 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002421 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2422 __ sqrtsd(input_reg, input_reg);
2423}
2424
2425
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002426void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2427 XMMRegister xmm_scratch = xmm0;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002428 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002429 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2430 ExternalReference negative_infinity =
2431 ExternalReference::address_of_negative_infinity();
2432 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity));
2433 __ ucomisd(xmm_scratch, input_reg);
2434 DeoptimizeIf(equal, instr->environment());
kmillikin@chromium.org31b12772011-02-02 16:08:26 +00002435 __ xorpd(xmm_scratch, xmm_scratch);
2436 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002437 __ sqrtsd(input_reg, input_reg);
2438}
2439
2440
2441void LCodeGen::DoPower(LPower* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002442 LOperand* left = instr->InputAt(0);
2443 LOperand* right = instr->InputAt(1);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002444 DoubleRegister result_reg = ToDoubleRegister(instr->result());
2445 Representation exponent_type = instr->hydrogen()->right()->representation();
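  // Three cases for the exponent: an untagged double, an untagged integer, or
  // a tagged value that must be a smi or a heap number.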
2446 if (exponent_type.IsDouble()) {
2447 // It is safe to use ebx directly since the instruction is marked
2448 // as a call.
2449 __ PrepareCallCFunction(4, ebx);
2450 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2451 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
2452 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2453 } else if (exponent_type.IsInteger32()) {
2454 // It is safe to use ebx directly since the instruction is marked
2455 // as a call.
2456 ASSERT(!ToRegister(right).is(ebx));
2457 __ PrepareCallCFunction(4, ebx);
2458 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2459 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
2460 __ CallCFunction(ExternalReference::power_double_int_function(), 4);
2461 } else {
2462 ASSERT(exponent_type.IsTagged());
2463 CpuFeatures::Scope scope(SSE2);
2464 Register right_reg = ToRegister(right);
2465
2466 Label non_smi, call;
2467 __ test(right_reg, Immediate(kSmiTagMask));
2468 __ j(not_zero, &non_smi);
2469 __ SmiUntag(right_reg);
2470 __ cvtsi2sd(result_reg, Operand(right_reg));
2471 __ jmp(&call);
2472
2473 __ bind(&non_smi);
2474 // It is safe to use ebx directly since the instruction is marked
2475 // as a call.
2476 ASSERT(!right_reg.is(ebx));
 2477 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx);
2478 DeoptimizeIf(not_equal, instr->environment());
2479 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
2480
2481 __ bind(&call);
2482 __ PrepareCallCFunction(4, ebx);
2483 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2484 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
2485 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2486 }
2487
2488 // Return value is in st(0) on ia32.
2489 // Store it into the (fixed) result register.
2490 __ sub(Operand(esp), Immediate(kDoubleSize));
2491 __ fstp_d(Operand(esp, 0));
2492 __ movdbl(result_reg, Operand(esp, 0));
2493 __ add(Operand(esp), Immediate(kDoubleSize));
2494}
2495
2496
2497void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2498 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
whesse@chromium.org023421e2010-12-21 12:19:12 +00002499 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2500 TranscendentalCacheStub::UNTAGGED);
2501 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2502}
2503
2504
2505void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2506 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2507 TranscendentalCacheStub stub(TranscendentalCache::COS,
2508 TranscendentalCacheStub::UNTAGGED);
2509 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2510}
2511
2512
2513void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2514 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2515 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2516 TranscendentalCacheStub::UNTAGGED);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002517 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2518}
2519
2520
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002521void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2522 switch (instr->op()) {
2523 case kMathAbs:
2524 DoMathAbs(instr);
2525 break;
2526 case kMathFloor:
2527 DoMathFloor(instr);
2528 break;
2529 case kMathRound:
2530 DoMathRound(instr);
2531 break;
2532 case kMathSqrt:
2533 DoMathSqrt(instr);
2534 break;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002535 case kMathPowHalf:
2536 DoMathPowHalf(instr);
2537 break;
whesse@chromium.org023421e2010-12-21 12:19:12 +00002538 case kMathCos:
2539 DoMathCos(instr);
2540 break;
2541 case kMathSin:
2542 DoMathSin(instr);
2543 break;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002544 case kMathLog:
2545 DoMathLog(instr);
2546 break;
2547
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002548 default:
2549 UNREACHABLE();
2550 }
2551}
2552
2553
2554void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2555 ASSERT(ToRegister(instr->result()).is(eax));
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002556 ASSERT(ToRegister(instr->InputAt(0)).is(ecx));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002557
2558 int arity = instr->arity();
2559 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2560 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2561 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2562}
2563
2564
2565void LCodeGen::DoCallNamed(LCallNamed* instr) {
2566 ASSERT(ToRegister(instr->result()).is(eax));
2567
2568 int arity = instr->arity();
2569 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2570 __ mov(ecx, instr->name());
2571 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2572 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2573}
2574
2575
2576void LCodeGen::DoCallFunction(LCallFunction* instr) {
2577 ASSERT(ToRegister(instr->result()).is(eax));
2578
2579 int arity = instr->arity();
2580 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2581 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2582 __ Drop(1);
2583 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2584}
2585
2586
2587void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2588 ASSERT(ToRegister(instr->result()).is(eax));
2589
2590 int arity = instr->arity();
2591 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2592 __ mov(ecx, instr->name());
2593 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2594 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2595}
2596
2597
2598void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2599 ASSERT(ToRegister(instr->result()).is(eax));
2600 __ mov(edi, instr->target());
2601 CallKnownFunction(instr->target(), instr->arity(), instr);
2602}
2603
2604
2605void LCodeGen::DoCallNew(LCallNew* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002606 ASSERT(ToRegister(instr->InputAt(0)).is(edi));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002607 ASSERT(ToRegister(instr->result()).is(eax));
2608
2609 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2610 __ Set(eax, Immediate(instr->arity()));
2611 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2612}
2613
2614
2615void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2616 CallRuntime(instr->function(), instr->arity(), instr);
2617}
2618
2619
2620void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2621 Register object = ToRegister(instr->object());
2622 Register value = ToRegister(instr->value());
2623 int offset = instr->offset();
2624
2625 if (!instr->transition().is_null()) {
2626 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
2627 }
2628
2629 // Do the store.
2630 if (instr->is_in_object()) {
2631 __ mov(FieldOperand(object, offset), value);
2632 if (instr->needs_write_barrier()) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002633 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002634 // Update the write barrier for the object for in-object properties.
2635 __ RecordWrite(object, offset, value, temp);
2636 }
2637 } else {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002638 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002639 __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
2640 __ mov(FieldOperand(temp, offset), value);
2641 if (instr->needs_write_barrier()) {
2642 // Update the write barrier for the properties array.
2643 // object is used as a scratch register.
2644 __ RecordWrite(temp, offset, value, object);
2645 }
2646 }
2647}
2648
2649
2650void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2651 ASSERT(ToRegister(instr->object()).is(edx));
2652 ASSERT(ToRegister(instr->value()).is(eax));
2653
2654 __ mov(ecx, instr->name());
2655 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
2656 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2657}
2658
2659
2660void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2661 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2662 DeoptimizeIf(above_equal, instr->environment());
2663}
2664
2665
2666void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
2667 Register value = ToRegister(instr->value());
2668 Register elements = ToRegister(instr->object());
2669 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
2670
2671 // Do the store.
2672 if (instr->key()->IsConstantOperand()) {
2673 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
2674 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
2675 int offset =
2676 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
2677 __ mov(FieldOperand(elements, offset), value);
2678 } else {
erik.corry@gmail.comd91075f2011-02-10 07:45:38 +00002679 __ mov(FieldOperand(elements,
2680 key,
2681 times_pointer_size,
2682 FixedArray::kHeaderSize),
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002683 value);
2684 }
2685
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002686 if (instr->hydrogen()->NeedsWriteBarrier()) {
2687 // Compute address of modified element and store it into key register.
erik.corry@gmail.comd91075f2011-02-10 07:45:38 +00002688 __ lea(key,
2689 FieldOperand(elements,
2690 key,
2691 times_pointer_size,
2692 FixedArray::kHeaderSize));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002693 __ RecordWrite(elements, key, value);
2694 }
2695}
2696
2697
2698void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2699 ASSERT(ToRegister(instr->object()).is(edx));
2700 ASSERT(ToRegister(instr->key()).is(ecx));
2701 ASSERT(ToRegister(instr->value()).is(eax));
2702
2703 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
2704 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2705}
2706
2707
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002708void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2709 class DeferredStringCharCodeAt: public LDeferredCode {
2710 public:
2711 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2712 : LDeferredCode(codegen), instr_(instr) { }
2713 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2714 private:
2715 LStringCharCodeAt* instr_;
2716 };
2717
2718 Register string = ToRegister(instr->string());
2719 Register index = no_reg;
2720 int const_index = -1;
2721 if (instr->index()->IsConstantOperand()) {
2722 const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2723 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2724 if (!Smi::IsValid(const_index)) {
2725 // Guaranteed to be out of bounds because of the assert above.
2726 // So the bounds check that must dominate this instruction must
2727 // have deoptimized already.
2728 if (FLAG_debug_code) {
2729 __ Abort("StringCharCodeAt: out of bounds index.");
2730 }
2731 // No code needs to be generated.
2732 return;
2733 }
2734 } else {
2735 index = ToRegister(instr->index());
2736 }
2737 Register result = ToRegister(instr->result());
2738
2739 DeferredStringCharCodeAt* deferred =
2740 new DeferredStringCharCodeAt(this, instr);
2741
2742 NearLabel flat_string, ascii_string, done;
2743
2744 // Fetch the instance type of the receiver into result register.
2745 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2746 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2747
2748 // We need special handling for non-flat strings.
2749 STATIC_ASSERT(kSeqStringTag == 0);
2750 __ test(result, Immediate(kStringRepresentationMask));
2751 __ j(zero, &flat_string);
2752
2753 // Handle non-flat strings.
2754 __ test(result, Immediate(kIsConsStringMask));
2755 __ j(zero, deferred->entry());
2756
2757 // ConsString.
2758 // Check whether the right hand side is the empty string (i.e. if
2759 // this is really a flat string in a cons string). If that is not
2760 // the case we would rather go to the runtime system now to flatten
2761 // the string.
2762 __ cmp(FieldOperand(string, ConsString::kSecondOffset),
2763 Immediate(Factory::empty_string()));
2764 __ j(not_equal, deferred->entry());
2765 // Get the first of the two strings and load its instance type.
2766 __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
2767 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2768 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2769 // If the first cons component is also non-flat, then go to runtime.
2770 STATIC_ASSERT(kSeqStringTag == 0);
2771 __ test(result, Immediate(kStringRepresentationMask));
2772 __ j(not_zero, deferred->entry());
2773
2774 // Check for 1-byte or 2-byte string.
2775 __ bind(&flat_string);
2776 STATIC_ASSERT(kAsciiStringTag != 0);
2777 __ test(result, Immediate(kStringEncodingMask));
2778 __ j(not_zero, &ascii_string);
2779
2780 // 2-byte string.
2781 // Load the 2-byte character code into the result register.
2782 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2783 if (instr->index()->IsConstantOperand()) {
2784 __ movzx_w(result,
2785 FieldOperand(string,
2786 SeqTwoByteString::kHeaderSize + 2 * const_index));
2787 } else {
2788 __ movzx_w(result, FieldOperand(string,
2789 index,
2790 times_2,
2791 SeqTwoByteString::kHeaderSize));
2792 }
2793 __ jmp(&done);
2794
2795 // ASCII string.
2796 // Load the byte into the result register.
2797 __ bind(&ascii_string);
2798 if (instr->index()->IsConstantOperand()) {
2799 __ movzx_b(result, FieldOperand(string,
2800 SeqAsciiString::kHeaderSize + const_index));
2801 } else {
2802 __ movzx_b(result, FieldOperand(string,
2803 index,
2804 times_1,
2805 SeqAsciiString::kHeaderSize));
2806 }
2807 __ bind(&done);
2808 __ bind(deferred->exit());
2809}
2810
2811
2812void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2813 Register string = ToRegister(instr->string());
2814 Register result = ToRegister(instr->result());
2815
2816 // TODO(3095996): Get rid of this. For now, we need to make the
2817 // result register contain a valid pointer because it is already
2818 // contained in the register pointer map.
2819 __ Set(result, Immediate(0));
2820
2821 __ PushSafepointRegisters();
2822 __ push(string);
2823 // Push the index as a smi. This is safe because of the checks in
2824 // DoStringCharCodeAt above.
2825 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2826 if (instr->index()->IsConstantOperand()) {
2827 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2828 __ push(Immediate(Smi::FromInt(const_index)));
2829 } else {
2830 Register index = ToRegister(instr->index());
2831 __ SmiTag(index);
2832 __ push(index);
2833 }
2834 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2835 RecordSafepointWithRegisters(
2836 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2837 if (FLAG_debug_code) {
2838 __ AbortIfNotSmi(eax);
2839 }
2840 __ SmiUntag(eax);
2841 __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax);
2842 __ PopSafepointRegisters();
2843}
2844
2845
2846void LCodeGen::DoStringLength(LStringLength* instr) {
2847 Register string = ToRegister(instr->string());
2848 Register result = ToRegister(instr->result());
2849 __ mov(result, FieldOperand(string, String::kLengthOffset));
2850}
2851
2852
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002853void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002854 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002855 ASSERT(input->IsRegister() || input->IsStackSlot());
2856 LOperand* output = instr->result();
2857 ASSERT(output->IsDoubleRegister());
2858 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
2859}
2860
2861
2862void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
2863 class DeferredNumberTagI: public LDeferredCode {
2864 public:
2865 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
2866 : LDeferredCode(codegen), instr_(instr) { }
2867 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
2868 private:
2869 LNumberTagI* instr_;
2870 };
2871
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002872 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002873 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2874 Register reg = ToRegister(input);
2875
2876 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
2877 __ SmiTag(reg);
2878 __ j(overflow, deferred->entry());
2879 __ bind(deferred->exit());
2880}
2881
2882
2883void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
2884 Label slow;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002885 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002886 Register tmp = reg.is(eax) ? ecx : eax;
2887
2888 // Preserve the value of all registers.
2889 __ PushSafepointRegisters();
2890
2891 // There was overflow, so bits 30 and 31 of the original integer
2892 // disagree. Try to allocate a heap number in new space and store
2893 // the value in there. If that fails, call the runtime system.
2894 NearLabel done;
2895 __ SmiUntag(reg);
2896 __ xor_(reg, 0x80000000);
2897 __ cvtsi2sd(xmm0, Operand(reg));
2898 if (FLAG_inline_new) {
2899 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
2900 __ jmp(&done);
2901 }
2902
2903 // Slow case: Call the runtime system to do the number allocation.
2904 __ bind(&slow);
2905
2906 // TODO(3095996): Put a valid pointer value in the stack slot where the result
2907 // register is stored, as this register is in the pointer map, but contains an
2908 // integer value.
2909 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));
2910
2911 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2912 RecordSafepointWithRegisters(
2913 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2914 if (!reg.is(eax)) __ mov(reg, eax);
2915
2916 // Done. Store the double value in xmm0 into the value field of the
2917 // allocated heap number.
2918 __ bind(&done);
2919 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
2920 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg);
2921 __ PopSafepointRegisters();
2922}
2923
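// Illustrative sketch (not part of the original file): what the untag/xor
// sequence above recovers. Tagging is "value << 1"; if that overflowed, bits
// 30 and 31 of the original value disagree, so the arithmetic ">> 1" restores
// the low 31 bits but leaves bit 31 wrong, and flipping it with xor 0x80000000
// yields the original integer again.
#if 0
static int32_t ExampleRecoverValueAfterSmiTagOverflow(int32_t value) {
  uint32_t tagged = static_cast<uint32_t>(value) << 1;    // overflowed tag
  int32_t untagged = static_cast<int32_t>(tagged) >> 1;   // bit 31 is wrong
  return untagged ^ static_cast<int32_t>(0x80000000u);    // flip bit 31
}
#endif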
2924
2925void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
2926 class DeferredNumberTagD: public LDeferredCode {
2927 public:
2928 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
2929 : LDeferredCode(codegen), instr_(instr) { }
2930 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
2931 private:
2932 LNumberTagD* instr_;
2933 };
2934
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002935 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002936 Register reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002937 Register tmp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002938
2939 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
2940 if (FLAG_inline_new) {
2941 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
2942 } else {
2943 __ jmp(deferred->entry());
2944 }
2945 __ bind(deferred->exit());
2946 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
2947}
2948
2949
2950void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2951 // TODO(3095996): Get rid of this. For now, we need to make the
2952 // result register contain a valid pointer because it is already
2953 // contained in the register pointer map.
2954 Register reg = ToRegister(instr->result());
2955 __ Set(reg, Immediate(0));
2956
2957 __ PushSafepointRegisters();
2958 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2959 RecordSafepointWithRegisters(
2960 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2961 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax);
2962 __ PopSafepointRegisters();
2963}
2964
2965
2966void LCodeGen::DoSmiTag(LSmiTag* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002967 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002968 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2969 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
2970 __ SmiTag(ToRegister(input));
2971}
2972
2973
2974void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002975 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002976 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2977 if (instr->needs_check()) {
2978 __ test(ToRegister(input), Immediate(kSmiTagMask));
2979 DeoptimizeIf(not_zero, instr->environment());
2980 }
2981 __ SmiUntag(ToRegister(input));
2982}
2983
2984
2985void LCodeGen::EmitNumberUntagD(Register input_reg,
2986 XMMRegister result_reg,
2987 LEnvironment* env) {
2988 NearLabel load_smi, heap_number, done;
2989
2990 // Smi check.
2991 __ test(input_reg, Immediate(kSmiTagMask));
2992 __ j(zero, &load_smi, not_taken);
2993
2994 // Heap number map check.
2995 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2996 Factory::heap_number_map());
2997 __ j(equal, &heap_number);
2998
2999 __ cmp(input_reg, Factory::undefined_value());
3000 DeoptimizeIf(not_equal, env);
3001
3002 // Convert undefined to NaN.
3003 __ push(input_reg);
3004 __ mov(input_reg, Factory::nan_value());
3005 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3006 __ pop(input_reg);
3007 __ jmp(&done);
3008
3009 // Heap number to XMM conversion.
3010 __ bind(&heap_number);
3011 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3012 __ jmp(&done);
3013
3014 // Smi to XMM conversion.
3015 __ bind(&load_smi);
3016 __ SmiUntag(input_reg); // Untag smi before converting to float.
3017 __ cvtsi2sd(result_reg, Operand(input_reg));
3018 __ SmiTag(input_reg); // Retag smi.
3019 __ bind(&done);
3020}
3021
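// Illustrative sketch (not part of the original file): the conversion above,
// written as C-like pseudocode. The helper names (IsSmi, HasHeapNumberMap,
// and so on) are hypothetical and only describe the checks performed by the
// emitted code.
#if 0
double ExampleNumberUntagD(TaggedValue input) {
  if (IsSmi(input)) return static_cast<double>(SmiValue(input));
  if (HasHeapNumberMap(input)) return HeapNumberValue(input);
  if (IsUndefined(input)) return NaN();  // undefined converts to NaN
  Deoptimize();                          // any other value bails out
}
#endif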
3022
3023class DeferredTaggedToI: public LDeferredCode {
3024 public:
3025 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3026 : LDeferredCode(codegen), instr_(instr) { }
3027 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3028 private:
3029 LTaggedToI* instr_;
3030};
3031
3032
3033void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3034 NearLabel done, heap_number;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003035 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003036
3037 // Heap number map check.
3038 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3039 Factory::heap_number_map());
3040
3041 if (instr->truncating()) {
3042 __ j(equal, &heap_number);
3043 // Check for undefined. Undefined is converted to zero for truncating
3044 // conversions.
3045 __ cmp(input_reg, Factory::undefined_value());
3046 DeoptimizeIf(not_equal, instr->environment());
3047 __ mov(input_reg, 0);
3048 __ jmp(&done);
3049
3050 __ bind(&heap_number);
3051 if (CpuFeatures::IsSupported(SSE3)) {
3052 CpuFeatures::Scope scope(SSE3);
3053 NearLabel convert;
3054 // Use more powerful conversion when SSE3 is available.
3055 // Load x87 register with heap number.
3056 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
3057 // Get exponent alone and check for too-big exponent.
3058 __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3059 __ and_(input_reg, HeapNumber::kExponentMask);
3060 const uint32_t kTooBigExponent =
3061 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3062 __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
3063 __ j(less, &convert);
3064 // Pop FPU stack before deoptimizing.
3065 __ ffree(0);
3066 __ fincstp();
3067 DeoptimizeIf(no_condition, instr->environment());
3068
3069 // Reserve space for the 64-bit answer.
3070 __ bind(&convert);
3071 __ sub(Operand(esp), Immediate(kDoubleSize));
3072 // Do conversion, which cannot fail because we checked the exponent.
3073 __ fisttp_d(Operand(esp, 0));
3074 __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3075 __ add(Operand(esp), Immediate(kDoubleSize));
3076 } else {
3077 NearLabel deopt;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003078 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003079 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3080 __ cvttsd2si(input_reg, Operand(xmm0));
3081 __ cmp(input_reg, 0x80000000u);
3082 __ j(not_equal, &done);
3083 // Check if the input was 0x80000000 (kMinInt).
3084 // If not, we got an overflow and we deoptimize.
3085 ExternalReference min_int = ExternalReference::address_of_min_int();
3086 __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
3087 __ ucomisd(xmm_temp, xmm0);
3088 DeoptimizeIf(not_equal, instr->environment());
3089 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3090 }
3091 } else {
3092 // Deoptimize if we don't have a heap number.
3093 DeoptimizeIf(not_equal, instr->environment());
3094
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003095 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003096 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3097 __ cvttsd2si(input_reg, Operand(xmm0));
3098 __ cvtsi2sd(xmm_temp, Operand(input_reg));
3099 __ ucomisd(xmm0, xmm_temp);
3100 DeoptimizeIf(not_equal, instr->environment());
3101 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3102 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3103 __ test(input_reg, Operand(input_reg));
3104 __ j(not_zero, &done);
3105 __ movmskpd(input_reg, xmm0);
3106 __ and_(input_reg, 1);
3107 DeoptimizeIf(not_zero, instr->environment());
3108 }
3109 }
3110 __ bind(&done);
3111}
3112
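// Illustrative sketch (not part of the original file): the exponent test used
// by the SSE3 path above, applied to the high 32 bits of an IEEE 754 double.
// A biased exponent of kExponentBias + 63 or more means |value| >= 2^63, which
// does not fit in the 64-bit integer produced by fisttp, so the generated code
// deoptimizes instead of converting.
#if 0
static bool ExampleExponentTooBigForInt64(uint32_t high_word) {
  const uint32_t kExponentMask = 0x7FF00000u;          // bits 20..30
  const uint32_t kTooBigExponent = (1023 + 63) << 20;  // biased exponent 1086
  return (high_word & kExponentMask) >= kTooBigExponent;
}
#endif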
3113
3114void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003115 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003116 ASSERT(input->IsRegister());
3117 ASSERT(input->Equals(instr->result()));
3118
3119 Register input_reg = ToRegister(input);
3120
3121 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3122
3123 // Smi check.
3124 __ test(input_reg, Immediate(kSmiTagMask));
3125 __ j(not_zero, deferred->entry());
3126
3127 // Smi to int32 conversion.
3128 __ SmiUntag(input_reg); // Untag smi.
3129
3130 __ bind(deferred->exit());
3131}
3132
3133
3134void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003135 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003136 ASSERT(input->IsRegister());
3137 LOperand* result = instr->result();
3138 ASSERT(result->IsDoubleRegister());
3139
3140 Register input_reg = ToRegister(input);
3141 XMMRegister result_reg = ToDoubleRegister(result);
3142
3143 EmitNumberUntagD(input_reg, result_reg, instr->environment());
3144}
3145
3146
3147void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003148 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003149 ASSERT(input->IsDoubleRegister());
3150 LOperand* result = instr->result();
3151 ASSERT(result->IsRegister());
3152
3153 XMMRegister input_reg = ToDoubleRegister(input);
3154 Register result_reg = ToRegister(result);
3155
3156 if (instr->truncating()) {
3157 // Performs a truncating conversion of a floating point number as used by
3158 // the JS bitwise operations.
3159 __ cvttsd2si(result_reg, Operand(input_reg));
3160 __ cmp(result_reg, 0x80000000u);
3161 if (CpuFeatures::IsSupported(SSE3)) {
3162 // This will deoptimize if the exponent of the input is out of range.
3163 CpuFeatures::Scope scope(SSE3);
3164 NearLabel convert, done;
3165 __ j(not_equal, &done);
3166 __ sub(Operand(esp), Immediate(kDoubleSize));
3167 __ movdbl(Operand(esp, 0), input_reg);
3168 // Get exponent alone and check for too-big exponent.
3169 __ mov(result_reg, Operand(esp, sizeof(int32_t)));
3170 __ and_(result_reg, HeapNumber::kExponentMask);
3171 const uint32_t kTooBigExponent =
3172 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3173 __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
3174 __ j(less, &convert);
3175 __ add(Operand(esp), Immediate(kDoubleSize));
3176 DeoptimizeIf(no_condition, instr->environment());
3177 __ bind(&convert);
3178 // Do conversion, which cannot fail because we checked the exponent.
3179 __ fld_d(Operand(esp, 0));
3180 __ fisttp_d(Operand(esp, 0));
3181 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
3182 __ add(Operand(esp), Immediate(kDoubleSize));
3183 __ bind(&done);
3184 } else {
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003185 NearLabel done;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003186 Register temp_reg = ToRegister(instr->TempAt(0));
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003187 XMMRegister xmm_scratch = xmm0;
3188
3189 // If cvttsd2si succeeded, we're done. Otherwise, we attempt
3190 // manual conversion.
3191 __ j(not_equal, &done);
3192
3193 // Get high 32 bits of the input in result_reg and temp_reg.
3194 __ pshufd(xmm_scratch, input_reg, 1);
3195 __ movd(Operand(temp_reg), xmm_scratch);
3196 __ mov(result_reg, temp_reg);
3197
3198 // Prepare negation mask in temp_reg.
3199 __ sar(temp_reg, kBitsPerInt - 1);
3200
3201 // Extract the exponent from result_reg and subtract adjusted
3202 // bias from it. The adjustment is selected in a way such that
3203 // when the difference is zero, the answer is in the low 32 bits
3204 // of the input, otherwise a shift has to be performed.
3205 __ shr(result_reg, HeapNumber::kExponentShift);
3206 __ and_(result_reg,
3207 HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
3208 __ sub(Operand(result_reg),
3209 Immediate(HeapNumber::kExponentBias +
3210 HeapNumber::kExponentBits +
3211 HeapNumber::kMantissaBits));
3212 // Don't handle big (> kMantissaBits + kExponentBits == 63) or
3213 // special exponents.
3214 DeoptimizeIf(greater, instr->environment());
3215
3216 // Zero out the sign and the exponent in the input (by shifting
3217 // it to the left) and restore the implicit mantissa bit,
3218 // i.e. convert the input to unsigned int64 shifted left by
3219 // kExponentBits.
3220 ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
3221 // Minus zero has the most significant bit set and the other
3222 // bits cleared.
3223 __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
3224 __ psllq(input_reg, HeapNumber::kExponentBits);
3225 __ por(input_reg, xmm_scratch);
3226
3227 // Get the amount to shift the input right in xmm_scratch.
3228 __ neg(result_reg);
3229 __ movd(xmm_scratch, Operand(result_reg));
3230
3231 // Shift the input right and extract low 32 bits.
3232 __ psrlq(input_reg, xmm_scratch);
3233 __ movd(Operand(result_reg), input_reg);
3234
3235 // Use the prepared mask in temp_reg to negate the result if necessary.
3236 __ xor_(result_reg, Operand(temp_reg));
3237 __ sub(result_reg, Operand(temp_reg));
3238 __ bind(&done);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003239 }
3240 } else {
3241 NearLabel done;
3242 __ cvttsd2si(result_reg, Operand(input_reg));
3243 __ cvtsi2sd(xmm0, Operand(result_reg));
3244 __ ucomisd(xmm0, input_reg);
3245 DeoptimizeIf(not_equal, instr->environment());
3246 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3247 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3248 // The integer converted back is equal to the original. We
3249 // only have to test if we got -0 as an input.
3250 __ test(result_reg, Operand(result_reg));
3251 __ j(not_zero, &done);
3252 __ movmskpd(result_reg, input_reg);
3253 // Bit 0 contains the sign of the double in input_reg.
3254 // If input was positive, we are ok and return 0, otherwise
3255 // deoptimize.
3256 __ and_(result_reg, 1);
3257 DeoptimizeIf(not_zero, instr->environment());
3258 }
3259 __ bind(&done);
3260 }
3261}
3262
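// Illustrative sketch (not part of the original file): the manual truncation
// performed by the non-SSE3 branch above, written in plain C++ on the IEEE 754
// bit pattern (assumes <stdint.h>-style types and memcpy are available). The
// mantissa with its implicit leading one is shifted so that the integer part
// lands in the low 32 bits, and the sign is applied with the xor/sub trick.
#if 0
static int32_t ExampleTruncateDoubleToInt32(double input) {
  uint64_t bits;
  memcpy(&bits, &input, sizeof(bits));
  uint32_t high = static_cast<uint32_t>(bits >> 32);
  int32_t sign_mask = static_cast<int32_t>(high) >> 31;  // 0 or -1
  int32_t exponent = static_cast<int32_t>((high >> 20) & 0x7FF) - 1023;
  if (exponent > 63) return 0;  // the generated code deoptimizes instead
  if (exponent < 0) return 0;   // |input| < 1; psrlq also yields 0 here
  // Drop sign and exponent, restore the implicit mantissa bit at bit 63.
  uint64_t significand = (bits << 11) | (1ULL << 63);
  uint32_t magnitude = static_cast<uint32_t>(significand >> (63 - exponent));
  return (static_cast<int32_t>(magnitude) ^ sign_mask) - sign_mask;
}
#endif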
3263
3264void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003265 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003266 ASSERT(input->IsRegister());
3267 __ test(ToRegister(input), Immediate(kSmiTagMask));
3268 DeoptimizeIf(instr->condition(), instr->environment());
3269}
3270
3271
3272void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003273 Register input = ToRegister(instr->InputAt(0));
3274 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003275 InstanceType first = instr->hydrogen()->first();
3276 InstanceType last = instr->hydrogen()->last();
3277
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003278 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003279
3280 // If there is only one type in the interval check for equality.
3281 if (first == last) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003282 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3283 static_cast<int8_t>(first));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003284 DeoptimizeIf(not_equal, instr->environment());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003285 } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
3286 // String has a dedicated bit in instance type.
3287 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask);
3288 DeoptimizeIf(not_zero, instr->environment());
3289 } else {
3290 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3291 static_cast<int8_t>(first));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003292 DeoptimizeIf(below, instr->environment());
3293 // Omit check for the last type.
3294 if (last != LAST_TYPE) {
3295 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3296 static_cast<int8_t>(last));
3297 DeoptimizeIf(above, instr->environment());
3298 }
3299 }
3300}
3301
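// Illustrative sketch (not part of the original file): the interval check
// above, expressed on plain instance-type values. The string case instead
// relies on string instance types having a dedicated bit, tested with
// kIsNotStringMask.
#if 0
static bool ExampleInstanceTypeInRange(uint8_t type, uint8_t first,
                                       uint8_t last) {
  if (first == last) return type == first;    // single type: equality check
  if (type < first) return false;
  return last == LAST_TYPE || type <= last;   // upper check omitted for LAST_TYPE
}
#endif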
3302
3303void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003304 ASSERT(instr->InputAt(0)->IsRegister());
3305 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003306 __ cmp(reg, instr->hydrogen()->target());
3307 DeoptimizeIf(not_equal, instr->environment());
3308}
3309
3310
3311void LCodeGen::DoCheckMap(LCheckMap* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003312 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003313 ASSERT(input->IsRegister());
3314 Register reg = ToRegister(input);
3315 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3316 instr->hydrogen()->map());
3317 DeoptimizeIf(not_equal, instr->environment());
3318}
3319
3320
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003321void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
3322 if (Heap::InNewSpace(*object)) {
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003323 Handle<JSGlobalPropertyCell> cell =
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003324 Factory::NewJSGlobalPropertyCell(object);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003325 __ mov(result, Operand::Cell(cell));
3326 } else {
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003327 __ mov(result, object);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003328 }
3329}
3330
3331
3332void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003333 Register reg = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003334
3335 Handle<JSObject> holder = instr->holder();
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003336 Handle<JSObject> current_prototype = instr->prototype();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003337
3338 // Load prototype object.
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003339 LoadHeapObject(reg, current_prototype);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003340
3341 // Check prototype maps up to the holder.
3342 while (!current_prototype.is_identical_to(holder)) {
3343 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3344 Handle<Map>(current_prototype->map()));
3345 DeoptimizeIf(not_equal, instr->environment());
3346 current_prototype =
3347 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3348 // Load next prototype object.
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003349 LoadHeapObject(reg, current_prototype);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003350 }
3351
3352 // Check the holder map.
3353 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3354 Handle<Map>(current_prototype->map()));
3355 DeoptimizeIf(not_equal, instr->environment());
3356}
3357
3358
3359void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
3360 // Set up the parameters to the stub/runtime call.
3361 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3362 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3363 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3364 __ push(Immediate(instr->hydrogen()->constant_elements()));
3365
3366 // Pick the right runtime function or stub to call.
3367 int length = instr->hydrogen()->length();
3368 if (instr->hydrogen()->IsCopyOnWrite()) {
3369 ASSERT(instr->hydrogen()->depth() == 1);
3370 FastCloneShallowArrayStub::Mode mode =
3371 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3372 FastCloneShallowArrayStub stub(mode, length);
3373 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3374 } else if (instr->hydrogen()->depth() > 1) {
3375 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
3376 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3377 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
3378 } else {
3379 FastCloneShallowArrayStub::Mode mode =
3380 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3381 FastCloneShallowArrayStub stub(mode, length);
3382 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3383 }
3384}
3385
3386
3387void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3388 // Setup the parameters to the stub/runtime call.
3389 // Set up the parameters to the stub/runtime call.
3390 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3391 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3392 __ push(Immediate(instr->hydrogen()->constant_properties()));
3393 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3394
lrn@chromium.org5d00b602011-01-05 09:51:43 +00003395 // Pick the right runtime function to call.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003396 if (instr->hydrogen()->depth() > 1) {
3397 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3398 } else {
3399 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3400 }
3401}
3402
3403
3404void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3405 NearLabel materialized;
3406 // Registers will be used as follows:
3407 // edi = JS function.
3408 // ecx = literals array.
3409 // ebx = regexp literal.
3410 // eax = regexp literal clone.
3411 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3412 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3413 int literal_offset = FixedArray::kHeaderSize +
3414 instr->hydrogen()->literal_index() * kPointerSize;
3415 __ mov(ebx, FieldOperand(ecx, literal_offset));
3416 __ cmp(ebx, Factory::undefined_value());
3417 __ j(not_equal, &materialized);
3418
3419 // Create the regexp literal by calling the runtime function.
3420 // Result will be in eax.
3421 __ push(ecx);
3422 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3423 __ push(Immediate(instr->hydrogen()->pattern()));
3424 __ push(Immediate(instr->hydrogen()->flags()));
3425 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3426 __ mov(ebx, eax);
3427
3428 __ bind(&materialized);
3429 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3430 Label allocated, runtime_allocate;
3431 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3432 __ jmp(&allocated);
3433
3434 __ bind(&runtime_allocate);
3435 __ push(ebx);
3436 __ push(Immediate(Smi::FromInt(size)));
3437 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3438 __ pop(ebx);
3439
3440 __ bind(&allocated);
3441 // Copy the content into the newly allocated memory.
3442 // (Unroll copy loop once for better throughput).
3443 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3444 __ mov(edx, FieldOperand(ebx, i));
3445 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3446 __ mov(FieldOperand(eax, i), edx);
3447 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3448 }
3449 if ((size % (2 * kPointerSize)) != 0) {
3450 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3451 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3452 }
3453}
3454
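// Illustrative sketch (not part of the original file): the shape of the copy
// loop emitted above, which moves two pointer-sized words per iteration and
// handles a possible odd trailing word separately.
#if 0
static void ExampleUnrolledCopy(intptr_t* dst, const intptr_t* src,
                                int size_in_bytes) {
  int words = size_in_bytes / kPointerSize;
  int i = 0;
  for (; i + 1 < words; i += 2) {   // two words per iteration
    dst[i] = src[i];
    dst[i + 1] = src[i + 1];
  }
  if (i < words) dst[i] = src[i];   // odd trailing word, if any
}
#endif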
3455
3456void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3457 // Use the fast case closure allocation code that allocates in new
3458 // space for nested functions that don't need literals cloning.
3459 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
ricow@chromium.org83aa5492011-02-07 12:42:56 +00003460 bool pretenure = instr->hydrogen()->pretenure();
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003461 if (shared_info->num_literals() == 0 && !pretenure) {
3462 FastNewClosureStub stub;
3463 __ push(Immediate(shared_info));
3464 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3465 } else {
3466 __ push(esi);
3467 __ push(Immediate(shared_info));
3468 __ push(Immediate(pretenure
3469 ? Factory::true_value()
3470 : Factory::false_value()));
3471 CallRuntime(Runtime::kNewClosure, 3, instr);
3472 }
3473}
3474
3475
3476void LCodeGen::DoTypeof(LTypeof* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003477 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003478 if (input->IsConstantOperand()) {
3479 __ push(ToImmediate(input));
3480 } else {
3481 __ push(ToOperand(input));
3482 }
3483 CallRuntime(Runtime::kTypeof, 1, instr);
3484}
3485
3486
3487void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003488 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003489 Register result = ToRegister(instr->result());
3490 Label true_label;
3491 Label false_label;
3492 NearLabel done;
3493
3494 Condition final_branch_condition = EmitTypeofIs(&true_label,
3495 &false_label,
3496 input,
3497 instr->type_literal());
3498 __ j(final_branch_condition, &true_label);
3499 __ bind(&false_label);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003500 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003501 __ jmp(&done);
3502
3503 __ bind(&true_label);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003504 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003505
3506 __ bind(&done);
3507}
3508
3509
3510void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00003511 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003512 int true_block = chunk_->LookupDestination(instr->true_block_id());
3513 int false_block = chunk_->LookupDestination(instr->false_block_id());
3514 Label* true_label = chunk_->GetAssemblyLabel(true_block);
3515 Label* false_label = chunk_->GetAssemblyLabel(false_block);
3516
3517 Condition final_branch_condition = EmitTypeofIs(true_label,
3518 false_label,
3519 input,
3520 instr->type_literal());
3521
3522 EmitBranch(true_block, false_block, final_branch_condition);
3523}
3524
3525
3526Condition LCodeGen::EmitTypeofIs(Label* true_label,
3527 Label* false_label,
3528 Register input,
3529 Handle<String> type_name) {
3530 Condition final_branch_condition = no_condition;
3531 if (type_name->Equals(Heap::number_symbol())) {
3532 __ test(input, Immediate(kSmiTagMask));
3533 __ j(zero, true_label);
3534 __ cmp(FieldOperand(input, HeapObject::kMapOffset),
3535 Factory::heap_number_map());
3536 final_branch_condition = equal;
3537
3538 } else if (type_name->Equals(Heap::string_symbol())) {
3539 __ test(input, Immediate(kSmiTagMask));
3540 __ j(zero, false_label);
3541 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3542 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3543 1 << Map::kIsUndetectable);
3544 __ j(not_zero, false_label);
3545 __ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
3546 final_branch_condition = below;
3547
3548 } else if (type_name->Equals(Heap::boolean_symbol())) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003549 __ cmp(input, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003550 __ j(equal, true_label);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00003551 __ cmp(input, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003552 final_branch_condition = equal;
3553
3554 } else if (type_name->Equals(Heap::undefined_symbol())) {
3555 __ cmp(input, Factory::undefined_value());
3556 __ j(equal, true_label);
3557 __ test(input, Immediate(kSmiTagMask));
3558 __ j(zero, false_label);
3559 // Check for undetectable objects => true.
3560 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3561 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3562 1 << Map::kIsUndetectable);
3563 final_branch_condition = not_zero;
3564
3565 } else if (type_name->Equals(Heap::function_symbol())) {
3566 __ test(input, Immediate(kSmiTagMask));
3567 __ j(zero, false_label);
3568 __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
3569 __ j(equal, true_label);
3570 // Regular expressions => 'function' (they are callable).
3571 __ CmpInstanceType(input, JS_REGEXP_TYPE);
3572 final_branch_condition = equal;
3573
3574 } else if (type_name->Equals(Heap::object_symbol())) {
3575 __ test(input, Immediate(kSmiTagMask));
3576 __ j(zero, false_label);
3577 __ cmp(input, Factory::null_value());
3578 __ j(equal, true_label);
3579 // Regular expressions => 'function', not 'object'.
3580 __ CmpObjectType(input, JS_REGEXP_TYPE, input);
3581 __ j(equal, false_label);
3582 // Check for undetectable objects => false.
3583 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3584 1 << Map::kIsUndetectable);
3585 __ j(not_zero, false_label);
3586 // Check for JS objects => true.
3587 __ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
3588 __ j(below, false_label);
3589 __ CmpInstanceType(input, LAST_JS_OBJECT_TYPE);
3590 final_branch_condition = below_equal;
3591
3592 } else {
3593 final_branch_condition = not_equal;
3594 __ jmp(false_label);
3595 // A dead branch instruction will be generated after this point.
3596 }
3597
3598 return final_branch_condition;
3599}
3600
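// Illustrative summary (not part of the original file) of the checks emitted
// above for each type literal:
//   "number"    - smi, or heap object with the heap number map
//   "string"    - non-smi, not undetectable, instance type below
//                 FIRST_NONSTRING_TYPE
//   "boolean"   - the true or false value
//   "undefined" - the undefined value, or a non-smi undetectable object
//   "function"  - JS_FUNCTION_TYPE, or JS_REGEXP_TYPE (regexps are callable)
//   "object"    - null, or a JS object that is neither a regexp nor
//                 undetectable
// Any other type literal falls through to the false label.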
3601
erik.corry@gmail.comd91075f2011-02-10 07:45:38 +00003602void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
3603 Register result = ToRegister(instr->result());
3604 NearLabel true_label;
3605 NearLabel false_label;
3606 NearLabel done;
3607
3608 EmitIsConstructCall(result);
3609 __ j(equal, &true_label);
3610
3611 __ mov(result, Factory::false_value());
3612 __ jmp(&done);
3613
3614 __ bind(&true_label);
3615 __ mov(result, Factory::true_value());
3616
3617 __ bind(&done);
3618}
3619
3620
3621void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
3622 Register temp = ToRegister(instr->TempAt(0));
3623 int true_block = chunk_->LookupDestination(instr->true_block_id());
3624 int false_block = chunk_->LookupDestination(instr->false_block_id());
3625
3626 EmitIsConstructCall(temp);
3627 EmitBranch(true_block, false_block, equal);
3628}
3629
3630
3631void LCodeGen::EmitIsConstructCall(Register temp) {
3632 // Get the frame pointer for the calling frame.
3633 __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3634
3635 // Skip the arguments adaptor frame if it exists.
3636 NearLabel check_frame_marker;
3637 __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
3638 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3639 __ j(not_equal, &check_frame_marker);
3640 __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
3641
3642 // Check the marker in the calling frame.
3643 __ bind(&check_frame_marker);
3644 __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
3645 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3646}
3647
3648
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003649void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
3650 // No code for lazy bailout instruction. Used to capture environment after a
3651 // call for populating the safepoint data with deoptimization data.
3652}
3653
3654
3655void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
3656 DeoptimizeIf(no_condition, instr->environment());
3657}
3658
3659
3660void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
3661 LOperand* obj = instr->object();
3662 LOperand* key = instr->key();
3663 __ push(ToOperand(obj));
3664 if (key->IsConstantOperand()) {
3665 __ push(ToImmediate(key));
3666 } else {
3667 __ push(ToOperand(key));
3668 }
kmillikin@chromium.org31b12772011-02-02 16:08:26 +00003669 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
3670 LPointerMap* pointers = instr->pointer_map();
3671 LEnvironment* env = instr->deoptimization_environment();
3672 RecordPosition(pointers->position());
3673 RegisterEnvironmentForDeoptimization(env);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003674 SafepointGenerator safepoint_generator(this,
kmillikin@chromium.org31b12772011-02-02 16:08:26 +00003675 pointers,
3676 env->deoptimization_index());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00003677 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
3678}
3679
3680
3681void LCodeGen::DoStackCheck(LStackCheck* instr) {
3682 // Perform stack overflow check.
3683 NearLabel done;
3684 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
3685 __ cmp(esp, Operand::StaticVariable(stack_limit));
3686 __ j(above_equal, &done);
3687
3688 StackCheckStub stub;
3689 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3690 __ bind(&done);
3691}
3692
3693
3694void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
3695 // This is a pseudo-instruction that ensures that the environment here is
3696 // properly registered for deoptimization and records the assembler's PC
3697 // offset.
3698 LEnvironment* environment = instr->environment();
3699 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
3700 instr->SpilledDoubleRegisterArray());
3701
3702 // If the environment were already registered, we would have no way of
3703 // backpatching it with the spill slot operands.
3704 ASSERT(!environment->HasBeenRegistered());
3705 RegisterEnvironmentForDeoptimization(environment);
3706 ASSERT(osr_pc_offset_ == -1);
3707 osr_pc_offset_ = masm()->pc_offset();
3708}
3709
3710
3711#undef __
3712
3713} } // namespace v8::internal
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00003714
3715#endif // V8_TARGET_ARCH_IA32