// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "ia32/lithium-codegen-ia32.h"
#include "code-stubs.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) { }
  virtual ~SafepointGenerator() { }

  virtual void Generate() {
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope(SSE2);
  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(StackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartPointer<char> debug_name = graph()->debug_name()->ToCString();
    PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = StackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
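      // In debug builds, fill every reserved spill slot with the zap value
      // so that reads of uninitialized slots are easy to spot.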
      __ mov(Operand(eax), Immediate(slots));
      Label loop;
      __ bind(&loop);
      __ push(Immediate(kSlotsZapValue));
      __ dec(eax);
      __ j(not_zero, &loop);
    } else {
      __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ mov(Operand(esp, offset), eax);
      }
#endif
    }
  }

  // Trace the call.
  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  return !is_aborted();
}

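// Returns the instruction following the one currently being emitted, or
// NULL if the current instruction is the last one in the chunk.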
LInstruction* LCodeGen::GetNextInstruction() {
  if (current_instruction_ < instructions_->length() - 1) {
    return instructions_->at(current_instruction_ + 1);
  } else {
    return NULL;
  }
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
    LDeferredCode* code = deferred_[i];
    __ bind(code->entry());
    code->Generate();
    __ jmp(code->exit());
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), StackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}


Immediate LCodeGen::ToImmediate(LOperand* op) {
  LConstantOperand* const_op = LConstantOperand::cast(op);
  Handle<Object> literal = chunk_->LookupLiteral(const_op);
  Representation r = chunk_->LookupLiteralRepresentation(const_op);
  if (r.IsInteger32()) {
    ASSERT(literal->IsNumber());
    return Immediate(static_cast<int32_t>(literal->Number()));
  } else if (r.IsDouble()) {
    Abort("unsupported double immediate");
  }
  ASSERT(r.IsTagged());
  return Immediate(literal);
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(ebp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(ebp, -(index - 1) * kPointerSize);
  }
}


Operand LCodeGen::HighOperand(LOperand* op) {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  int offset = (index >= 0) ? index + 3 : index - 1;
  return Operand(ebp, -offset * kPointerSize);
}


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = StackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}

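// Emits a call to the given code object and records the position, safepoint
// and lazy deoptimization information that the call requires.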
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RegisterLazyDeoptimization(instr);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallRuntime(Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(function, num_arguments);
  RegisterLazyDeoptimization(instr);
}


void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
  // Create the environment to bailout to. If the call has side effects
  // execution has to continue after the call otherwise execution can continue
  // from a previous bailout point repeating the call.
  LEnvironment* deoptimization_environment;
  if (instr->HasDeoptimizationEnvironment()) {
    deoptimization_environment = instr->deoptimization_environment();
  } else {
    deoptimization_environment = instr->environment();
  }

  RegisterEnvironmentForDeoptimization(deoptimization_environment);
  RecordSafepoint(instr->pointer_map(),
                  deoptimization_environment->deoptimization_index());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    environment->Register(deoptimization_index, translation.index());
    deoptimizations_.Add(environment);
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

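  // When --deopt-every-n-times is set, decrement the deopt counter stored on
  // the SharedFunctionInfo and force a deoptimization each time it reaches
  // zero, resetting the counter before taking the forced deopt.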
  if (FLAG_deopt_every_n_times != 0) {
    Handle<SharedFunctionInfo> shared(info_->shared_info());
    Label no_deopt;
    __ pushfd();
    __ push(eax);
    __ push(ebx);
    __ mov(ebx, shared);
    __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
    __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    __ j(not_zero, &no_deopt);
    if (FLAG_trap_on_deopt) __ int3();
    __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);

    __ bind(&no_deopt);
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ int3();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      NearLabel done;
      __ j(NegateCondition(cc), &done);
      __ int3();
      __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      Factory::NewDeoptimizationInputData(length, TENURED);

  data->SetTranslationByteArray(*translations_.CreateByteArray());
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      Factory::NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
  }
  code->set_deoptimization_data(*data);
}

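// Returns the index of the given literal in the deoptimization literal array,
// adding the literal to the array if it is not already present.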
int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    int deoptimization_index) {
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deoptimization_index);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
  if (kind & Safepoint::kWithRegisters) {
    // Register esi always contains a pointer to the context.
    safepoint.DefinePointerRegister(esi);
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            int deoptimization_index) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
                  deoptimization_index);
}

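// Records the given source position with the assembler's position recorder;
// skipped when debug info is disabled or the position is unknown.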
void LCodeGen::RecordPosition(int position) {
  if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  LCodeGen::DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}

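// Emits the parallel moves recorded at each inner position of the gap and,
// if the next instruction is a lazy bailout, records the pc after the gap in
// the safepoint table.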
void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }

  LInstruction* next = GetNextInstruction();
  if (next != NULL && next->IsLazyBailout()) {
    int pc = masm()->pc_offset();
    safepoints_.SetPcAfterGap(pc);
  }
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->result()).is(eax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCharAt: {
      StringCharAtStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::MathPow: {
      MathPowStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(edx));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register right_reg = ToRegister(right);

  // Check for x % 0.
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Sign extend to edx.
  __ cdq();

  // Check for (0 % -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel positive_left;
    NearLabel done;
    __ test(eax, Operand(eax));
    __ j(not_sign, &positive_left);
    __ idiv(right_reg);

    // Test the remainder for 0, because then the result would be -0.
    __ test(edx, Operand(edx));
    __ j(not_zero, &done);

    DeoptimizeIf(no_condition, instr->environment());
    __ bind(&positive_left);
    __ idiv(right_reg);
    __ bind(&done);
  } else {
    __ idiv(right_reg);
  }
}


void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(eax));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register left_reg = eax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel left_not_zero;
    __ test(left_reg, Operand(left_reg));
    __ j(not_zero, &left_not_zero);
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    NearLabel left_not_min_int;
    __ cmp(left_reg, kMinInt);
    __ j(not_zero, &left_not_min_int);
    __ cmp(right_reg, -1);
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to edx.
  __ cdq();
  __ idiv(right_reg);

  // Deoptimize if remainder is not 0.
  __ test(edx, Operand(edx));
  DeoptimizeIf(not_zero, instr->environment());
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ mov(ToRegister(instr->TempAt(0)), left);
  }

  if (right->IsConstantOperand()) {
    __ imul(left, left, ToInteger32(LConstantOperand::cast(right)));
  } else {
    __ imul(left, ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    NearLabel done;
    __ test(left, Operand(left));
    __ j(not_zero, &done);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) < 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), right_operand);
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), right_operand);
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), right_operand);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(ecx));

    switch (instr->op()) {
      case Token::SAR:
        __ sar_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shr_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        }
        break;
      case Token::SHL:
        __ shl_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sar(ToRegister(left), shift_count);
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        } else {
          __ shr(ToRegister(left), shift_count);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shl(ToRegister(left), shift_count);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ sub(ToOperand(left), ToImmediate(right));
  } else {
    __ sub(ToRegister(left), ToOperand(right));
  }
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  // Use xor to produce +0.0 in a fast and compact way, but avoid to
  // do so if the constant is -0.0.
  if (BitCast<uint64_t, double>(v) == 0) {
    __ xorpd(res, res);
  } else {
    int32_t v_int32 = static_cast<int32_t>(v);
    if (static_cast<double>(v_int32) == v) {
      __ push_imm32(v_int32);
      __ cvtsi2sd(res, Operand(esp, 0));
      __ add(Operand(esp), Immediate(kPointerSize));
    } else {
      uint64_t int_val = BitCast<uint64_t, double>(v);
      int32_t lower = static_cast<int32_t>(int_val);
      int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
      __ push_imm32(upper);
      __ push_imm32(lower);
      __ movdbl(res, Operand(esp, 0));
      __ add(Operand(esp), Immediate(2 * kPointerSize));
    }
  }
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  NearLabel done;
  // If the object is a smi return the object.
  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, &done);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
  __ j(not_equal, &done);
  __ mov(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToOperand(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ add(ToOperand(left), ToImmediate(right));
  } else {
    __ add(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  // Modulo uses a fixed result register.
  ASSERT(instr->op() == Token::MOD || left->Equals(instr->result()));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::SUB:
      __ subsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MUL:
      __ mulsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::DIV:
      __ divsd(ToDoubleRegister(left), ToDoubleRegister(right));
      break;
    case Token::MOD: {
      // Pass two doubles as arguments on the stack.
      __ PrepareCallCFunction(4, eax);
      __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
      __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
      __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);

      // Return value is in st(0) on ia32.
      // Store it into the (fixed) result register.
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ fstp_d(Operand(esp, 0));
      __ movdbl(ToDoubleRegister(instr->result()), Operand(esp, 0));
      __ add(Operand(esp), Immediate(kDoubleSize));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(edx));
  ASSERT(ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}

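// Returns the index of the next block after the given one whose label has not
// been replaced, i.e. the next block that will actually emit code, or -1 if
// there is no such block.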
int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}

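// Emits a branch on the given condition to the true and false blocks,
// omitting the jump to whichever target is the next block to be emitted.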
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ jmp(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ test(reg, Operand(reg));
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ cmp(reg, Factory::true_value());
      EmitBranch(true_block, false_block, equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ cmp(reg, Factory::undefined_value());
      __ j(equal, false_label);
      __ cmp(reg, Factory::true_value());
      __ j(equal, true_label);
      __ cmp(reg, Factory::false_value());
      __ j(equal, false_label);
      __ test(reg, Operand(reg));
      __ j(equal, false_label);
      __ test(reg, Immediate(kSmiTagMask));
      __ j(zero, true_label);

      // Test for double values. Zero is false.
      NearLabel call_stub;
      __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Factory::heap_number_map());
      __ j(not_equal, &call_stub);
      __ fldz();
      __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
      __ FCmp();
      __ j(zero, false_label);
      __ jmp(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      __ pushad();
      __ push(reg);
      __ CallStub(&stub);
      __ test(eax, Operand(eax));
      __ popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}


void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
  block = chunk_->LookupDestination(block);
  int next_block = GetNextEmittedBlock(current_block_);
  if (block != next_block) {
    // Perform stack overflow check if this goto needs it before jumping.
    if (deferred_stack_check != NULL) {
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit();
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, chunk_->GetAssemblyLabel(block));
      __ jmp(deferred_stack_check->entry());
      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
    } else {
      __ jmp(chunk_->GetAssemblyLabel(block));
    }
  }
}


void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  __ pushad();
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
  __ popad();
}

void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = equal;
      break;
    case Token::LT:
      cond = is_unsigned ? below : less;
      break;
    case Token::GT:
      cond = is_unsigned ? above : greater;
      break;
    case Token::LTE:
      cond = is_unsigned ? below_equal : less_equal;
      break;
    case Token::GTE:
      cond = is_unsigned ? above_equal : greater_equal;
      break;
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}


void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
  if (right->IsConstantOperand()) {
    __ cmp(ToOperand(left), ToImmediate(right));
  } else {
    __ cmp(ToRegister(left), ToOperand(right));
  }
}


void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();

  NearLabel unordered;
  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the unordered case, which produces a false value.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, &unordered, not_taken);
  } else {
    EmitCmpI(left, right);
  }

  NearLabel done;
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  __ mov(ToRegister(result), Factory::true_value());
  __ j(cc, &done);

  __ bind(&unordered);
  __ mov(ToRegister(result), Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the false block.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}


void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  Register result = ToRegister(instr->result());

  __ cmp(left, Operand(right));
  __ mov(result, Factory::true_value());
  NearLabel done;
  __ j(equal, &done);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->InputAt(0));
  Register right = ToRegister(instr->InputAt(1));
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  __ cmp(left, Operand(right));
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Materialize false.

  __ cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    __ mov(result, Factory::true_value());
    NearLabel done;
    __ j(equal, &done);
    __ mov(result, Factory::false_value());
    __ bind(&done);
  } else {
    NearLabel true_value, false_value, done;
    __ j(equal, &true_value);
    __ cmp(reg, Factory::undefined_value());
    __ j(equal, &true_value);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = result;
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &true_value);
    __ bind(&false_value);
    __ mov(result, Factory::false_value());
    __ jmp(&done);
    __ bind(&true_value);
    __ mov(result, Factory::true_value());
    __ bind(&done);
  }
}


void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmp(reg, Factory::null_value());
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, equal);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ j(equal, true_label);
    __ cmp(reg, Factory::undefined_value());
    __ j(equal, true_label);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->TempAt(0));
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, not_zero);
  }
}

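// Tests whether the value in the input register is a JS object. Jumps to
// is_not_object or is_object for the cases that can be decided here and
// returns the condition that holds when the value is an object, to be
// tested by the caller.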
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  ASSERT(!input.is(temp1));
  ASSERT(!input.is(temp2));
  ASSERT(!temp1.is(temp2));

  __ test(input, Immediate(kSmiTagMask));
  __ j(equal, is_not_object);

  __ cmp(input, Factory::null_value());
  __ j(equal, is_object);

  __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
  __ test(temp2, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, is_not_object);

  __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
  __ j(below, is_not_object);
  __ cmp(temp2, LAST_JS_OBJECT_TYPE);
  return below_equal;
}


void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = ToRegister(instr->TempAt(0));
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ j(true_cond, &is_true);

  __ bind(&is_false);
  __ mov(result, Factory::false_value());
  __ jmp(&done);

  __ bind(&is_true);
  __ mov(result, Factory::true_value());

  __ bind(&done);
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);

  EmitBranch(true_block, false_block, true_cond);
}


void LCodeGen::DoIsSmi(LIsSmi* instr) {
  Operand input = ToOperand(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ test(input, Immediate(kSmiTagMask));
  __ mov(result, Factory::true_value());
  NearLabel done;
  __ j(zero, &done);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  Operand input = ToOperand(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ test(input, Immediate(kSmiTagMask));
  EmitBranch(true_block, false_block, zero);
}

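// Returns the single instance type against which a HHasInstanceType check
// can be implemented as one comparison.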
static InstanceType TestType(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == FIRST_TYPE) return to;
  ASSERT(from == to || to == LAST_TYPE);
  return from;
}

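// Returns the condition that, together with the instance type returned by
// TestType above, implements the HHasInstanceType range check.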
static Condition BranchCondition(HHasInstanceType* instr) {
  InstanceType from = instr->from();
  InstanceType to = instr->to();
  if (from == to) return equal;
  if (to == LAST_TYPE) return above_equal;
  if (from == FIRST_TYPE) return below_equal;
  UNREACHABLE();
  return equal;
}


void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ test(input, Immediate(kSmiTagMask));
  NearLabel done, is_false;
  __ j(zero, &is_false);
  __ CmpObjectType(input, TestType(instr->hydrogen()), result);
  __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
  __ mov(result, Factory::true_value());
  __ jmp(&done);
  __ bind(&is_false);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, false_label);

  __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
  EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  __ mov(result, Factory::true_value());
  __ test(FieldOperand(input, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  NearLabel done;
  __ j(not_zero, &done);
  __ mov(result, Factory::false_value());
  __ bind(&done);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ test(FieldOperand(input, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  EmitBranch(true_block, false_block, not_equal);
}


// Branches to a label or falls through with the answer in the z flag. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, is_false);
  __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
  __ j(below, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ j(equal, is_true);
  } else {
    __ j(equal, is_false);
  }

  // Check if the constructor in the map is a function.
  __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
1603 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1604 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1605
1606 // Objects with a non-function constructor have class 'Object'.
1607 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1608 if (class_name->IsEqualTo(CStrVector("Object"))) {
1609 __ j(not_equal, is_true);
1610 } else {
1611 __ j(not_equal, is_false);
1612 }
1613
1614 // temp now contains the constructor function. Grab the
1615 // instance class name from there.
1616 __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1617 __ mov(temp, FieldOperand(temp,
1618 SharedFunctionInfo::kInstanceClassNameOffset));
1619 // The class name we are testing against is a symbol because it's a literal.
1620 // The name in the constructor is a symbol because of the way the context is
1621 // booted. This routine isn't expected to work for random API-created
1622 // classes and it doesn't have to because you can't access it with natives
1623 // syntax. Since both sides are symbols it is sufficient to use an identity
1624 // comparison.
1625 __ cmp(temp, class_name);
1626 // End with the answer in the z flag.
1627}
1628
1629
1630void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001631 Register input = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001632 Register result = ToRegister(instr->result());
1633 ASSERT(input.is(result));
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001634 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001635 Handle<String> class_name = instr->hydrogen()->class_name();
1636 NearLabel done;
1637 Label is_true, is_false;
1638
1639 EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);
1640
1641 __ j(not_equal, &is_false);
1642
1643 __ bind(&is_true);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001644 __ mov(result, Factory::true_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001645 __ jmp(&done);
1646
1647 __ bind(&is_false);
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00001648 __ mov(result, Factory::false_value());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001649 __ bind(&done);
1650}
1651
1652
1653void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001654 Register input = ToRegister(instr->InputAt(0));
1655 Register temp = ToRegister(instr->TempAt(0));
1656 Register temp2 = ToRegister(instr->TempAt(1));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001657 if (input.is(temp)) {
1658 // Swap.
1659 Register swapper = temp;
1660 temp = temp2;
1661 temp2 = swapper;
1662 }
1663 Handle<String> class_name = instr->hydrogen()->class_name();
1664
1665 int true_block = chunk_->LookupDestination(instr->true_block_id());
1666 int false_block = chunk_->LookupDestination(instr->false_block_id());
1667
1668 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1669 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1670
1671 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1672
1673 EmitBranch(true_block, false_block, equal);
1674}
1675
1676
1677void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001678 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001679 int true_block = instr->true_block_id();
1680 int false_block = instr->false_block_id();
1681
1682 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1683 EmitBranch(true_block, false_block, equal);
1684}
1685
1686
1687void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001688 // Object and function are in fixed registers defined by the stub.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001689 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001690 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1691
1692 NearLabel true_value, done;
1693 __ test(eax, Operand(eax));
1694 __ j(zero, &true_value);
1695 __ mov(ToRegister(instr->result()), Factory::false_value());
1696 __ jmp(&done);
1697 __ bind(&true_value);
1698 __ mov(ToRegister(instr->result()), Factory::true_value());
1699 __ bind(&done);
1700}
1701
1702
1703void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1704 int true_block = chunk_->LookupDestination(instr->true_block_id());
1705 int false_block = chunk_->LookupDestination(instr->false_block_id());
1706
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00001707 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001708 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1709 __ test(eax, Operand(eax));
1710 EmitBranch(true_block, false_block, zero);
1711}
1712
1713
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001714void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1715 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1716 public:
1717 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1718 LInstanceOfKnownGlobal* instr)
1719 : LDeferredCode(codegen), instr_(instr) { }
1720 virtual void Generate() {
1721 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1722 }
1723
1724 Label* map_check() { return &map_check_; }
1725
1726 private:
1727 LInstanceOfKnownGlobal* instr_;
1728 Label map_check_;
1729 };
1730
1731 DeferredInstanceOfKnownGlobal* deferred;
1732 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1733
1734 Label done, false_result;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001735 Register object = ToRegister(instr->InputAt(0));
1736 Register temp = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001737
1738 // A Smi is not instance of anything.
1739 __ test(object, Immediate(kSmiTagMask));
1740 __ j(zero, &false_result, not_taken);
1741
1742 // This is the inlined call site instanceof cache. The two occourences of the
1743 // hole value will be patched to the last map/result pair generated by the
1744 // instanceof stub.
1745 NearLabel cache_miss;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001746 Register map = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001747 __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
1748 __ bind(deferred->map_check()); // Label for calculating code patching.
1749 __ cmp(map, Factory::the_hole_value()); // Patched to cached map.
1750 __ j(not_equal, &cache_miss, not_taken);
1751 __ mov(eax, Factory::the_hole_value()); // Patched to either true or false.
1752 __ jmp(&done);
1753
1754 // The inlined call site cache did not match. Check null and string before
1755 // calling the deferred code.
1756 __ bind(&cache_miss);
1757 // Null is not instance of anything.
1758 __ cmp(object, Factory::null_value());
1759 __ j(equal, &false_result);
1760
1761 // String values are not instances of anything.
1762 Condition is_string = masm_->IsObjectStringType(object, temp, temp);
1763 __ j(is_string, &false_result);
1764
1765 // Go to the deferred code.
1766 __ jmp(deferred->entry());
1767
1768 __ bind(&false_result);
1769 __ mov(ToRegister(instr->result()), Factory::false_value());
1770
1771 // Here result has either true or false. Deferred code also produces true or
1772 // false object.
1773 __ bind(deferred->exit());
1774 __ bind(&done);
1775}
1776
1777
1778void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1779 Label* map_check) {
1780 __ PushSafepointRegisters();
1781
1782 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1783 flags = static_cast<InstanceofStub::Flags>(
1784 flags | InstanceofStub::kArgsInRegisters);
1785 flags = static_cast<InstanceofStub::Flags>(
1786 flags | InstanceofStub::kCallSiteInlineCheck);
1787 flags = static_cast<InstanceofStub::Flags>(
1788 flags | InstanceofStub::kReturnTrueFalseObject);
1789 InstanceofStub stub(flags);
1790
1791 // Get the temp register reserved by the instruction. This needs to be edi as
1792 // its slot of the pushing of safepoint registers is used to communicate the
1793 // offset to the location of the map check.
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001794 Register temp = ToRegister(instr->TempAt(0));
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00001795 ASSERT(temp.is(edi));
1796 __ mov(InstanceofStub::right(), Immediate(instr->function()));
1797 static const int kAdditionalDelta = 13;
1798 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1799 Label before_push_delta;
1800 __ bind(&before_push_delta);
1801 __ mov(temp, Immediate(delta));
1802 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
1803 __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
1804 ASSERT_EQ(kAdditionalDelta,
1805 masm_->SizeOfCodeGeneratedSince(&before_push_delta));
1806 RecordSafepointWithRegisters(
1807 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1808 // Put the result value into the eax slot and restore all registers.
1809 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax);
1810
1811 __ PopSafepointRegisters();
1812}
1813
1814
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001815static Condition ComputeCompareCondition(Token::Value op) {
1816 switch (op) {
1817 case Token::EQ_STRICT:
1818 case Token::EQ:
1819 return equal;
1820 case Token::LT:
1821 return less;
1822 case Token::GT:
1823 return greater;
1824 case Token::LTE:
1825 return less_equal;
1826 case Token::GTE:
1827 return greater_equal;
1828 default:
1829 UNREACHABLE();
1830 return no_condition;
1831 }
1832}
1833
1834
1835void LCodeGen::DoCmpT(LCmpT* instr) {
1836 Token::Value op = instr->op();
1837
1838 Handle<Code> ic = CompareIC::GetUninitialized(op);
1839 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1840
1841 Condition condition = ComputeCompareCondition(op);
1842 if (op == Token::GT || op == Token::LTE) {
1843 condition = ReverseCondition(condition);
1844 }
1845 NearLabel true_value, done;
1846 __ test(eax, Operand(eax));
1847 __ j(condition, &true_value);
1848 __ mov(ToRegister(instr->result()), Factory::false_value());
1849 __ jmp(&done);
1850 __ bind(&true_value);
1851 __ mov(ToRegister(instr->result()), Factory::true_value());
1852 __ bind(&done);
1853}
1854
1855
1856void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1857 Token::Value op = instr->op();
1858 int true_block = chunk_->LookupDestination(instr->true_block_id());
1859 int false_block = chunk_->LookupDestination(instr->false_block_id());
1860
1861 Handle<Code> ic = CompareIC::GetUninitialized(op);
1862 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1863
1864 // The compare stub expects compare condition and the input operands
1865 // reversed for GT and LTE.
1866 Condition condition = ComputeCompareCondition(op);
1867 if (op == Token::GT || op == Token::LTE) {
1868 condition = ReverseCondition(condition);
1869 }
1870 __ test(eax, Operand(eax));
1871 EmitBranch(true_block, false_block, condition);
1872}
1873
1874
1875void LCodeGen::DoReturn(LReturn* instr) {
1876 if (FLAG_trace) {
1877 // Preserve the return value on the stack and rely on the runtime
1878 // call to return the value in the same register.
1879 __ push(eax);
1880 __ CallRuntime(Runtime::kTraceExit, 1);
1881 }
1882 __ mov(esp, ebp);
1883 __ pop(ebp);
1884 __ ret((ParameterCount() + 1) * kPointerSize);
1885}
1886
1887
1888void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) {
1889 Register result = ToRegister(instr->result());
1890 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
1891 if (instr->hydrogen()->check_hole_value()) {
1892 __ cmp(result, Factory::the_hole_value());
1893 DeoptimizeIf(equal, instr->environment());
1894 }
1895}
1896
1897
1898void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001899 Register value = ToRegister(instr->InputAt(0));
ager@chromium.org378b34e2011-01-28 08:04:38 +00001900 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
1901
1902 // If the cell we are storing to contains the hole it could have
1903 // been deleted from the property dictionary. In that case, we need
1904 // to update the property details in the property dictionary to mark
1905 // it as no longer deleted. We deoptimize in that case.
1906 if (instr->hydrogen()->check_hole_value()) {
1907 __ cmp(cell_operand, Factory::the_hole_value());
1908 DeoptimizeIf(equal, instr->environment());
1909 }
1910
1911 // Store the value.
1912 __ mov(cell_operand, value);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001913}
1914
1915
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001916void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
ricow@chromium.org83aa5492011-02-07 12:42:56 +00001917 Register context = ToRegister(instr->context());
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001918 Register result = ToRegister(instr->result());
ricow@chromium.org83aa5492011-02-07 12:42:56 +00001919 __ mov(result, ContextOperand(context, instr->slot_index()));
1920}
1921
1922
1923void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
1924 Register context = ToRegister(instr->context());
1925 Register value = ToRegister(instr->value());
1926 __ mov(ContextOperand(context, instr->slot_index()), value);
1927 if (instr->needs_write_barrier()) {
1928 Register temp = ToRegister(instr->TempAt(0));
1929 int offset = Context::SlotOffset(instr->slot_index());
1930 __ RecordWrite(context, offset, value, temp);
1931 }
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00001932}
1933
1934
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001935void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001936 Register object = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001937 Register result = ToRegister(instr->result());
1938 if (instr->hydrogen()->is_in_object()) {
1939 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
1940 } else {
1941 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
1942 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
1943 }
1944}
1945
1946
1947void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
1948 ASSERT(ToRegister(instr->object()).is(eax));
1949 ASSERT(ToRegister(instr->result()).is(eax));
1950
1951 __ mov(ecx, instr->name());
1952 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1953 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1954}
1955
1956
fschneider@chromium.org9e3e0b62011-01-03 10:16:46 +00001957void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
1958 Register function = ToRegister(instr->function());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00001959 Register temp = ToRegister(instr->TempAt(0));
fschneider@chromium.org9e3e0b62011-01-03 10:16:46 +00001960 Register result = ToRegister(instr->result());
1961
1962 // Check that the function really is a function.
1963 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
1964 DeoptimizeIf(not_equal, instr->environment());
1965
1966 // Check whether the function has an instance prototype.
1967 NearLabel non_instance;
1968 __ test_b(FieldOperand(result, Map::kBitFieldOffset),
1969 1 << Map::kHasNonInstancePrototype);
1970 __ j(not_zero, &non_instance);
1971
1972 // Get the prototype or initial map from the function.
1973 __ mov(result,
1974 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1975
1976 // Check that the function has a prototype or an initial map.
1977 __ cmp(Operand(result), Immediate(Factory::the_hole_value()));
1978 DeoptimizeIf(equal, instr->environment());
1979
1980 // If the function does not have an initial map, we're done.
1981 NearLabel done;
1982 __ CmpObjectType(result, MAP_TYPE, temp);
1983 __ j(not_equal, &done);
1984
1985 // Get the prototype from the initial map.
1986 __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
1987 __ jmp(&done);
1988
1989 // Non-instance prototype: Fetch prototype from constructor field
1990 // in the function's map.
1991 __ bind(&non_instance);
1992 __ mov(result, FieldOperand(result, Map::kConstructorOffset));
1993
1994 // All done.
1995 __ bind(&done);
1996}
1997
1998
kasperl@chromium.orga5551262010-12-07 12:49:48 +00001999void LCodeGen::DoLoadElements(LLoadElements* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002000 ASSERT(instr->result()->Equals(instr->InputAt(0)));
2001 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002002 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset));
2003 if (FLAG_debug_code) {
2004 NearLabel done;
2005 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
2006 Immediate(Factory::fixed_array_map()));
2007 __ j(equal, &done);
2008 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
2009 Immediate(Factory::fixed_cow_array_map()));
2010 __ Check(equal, "Check for fast elements failed.");
2011 __ bind(&done);
2012 }
2013}
2014
2015
2016void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2017 Register arguments = ToRegister(instr->arguments());
2018 Register length = ToRegister(instr->length());
2019 Operand index = ToOperand(instr->index());
2020 Register result = ToRegister(instr->result());
2021
2022 __ sub(length, index);
2023 DeoptimizeIf(below_equal, instr->environment());
2024
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002025 // There are two words between the frame pointer and the last argument.
2026 // Subtracting from length accounts for one of them add one more.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002027 __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2028}
2029
2030
2031void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2032 Register elements = ToRegister(instr->elements());
2033 Register key = ToRegister(instr->key());
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00002034 Register result = ToRegister(instr->result());
2035 ASSERT(result.is(elements));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002036
2037 // Load the result.
2038 __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
2039
sgjesse@chromium.orgc6c57182011-01-17 12:24:25 +00002040 // Check for the hole value.
2041 __ cmp(result, Factory::the_hole_value());
2042 DeoptimizeIf(equal, instr->environment());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002043}
2044
2045
2046void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2047 ASSERT(ToRegister(instr->object()).is(edx));
2048 ASSERT(ToRegister(instr->key()).is(eax));
2049
2050 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2051 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2052}
2053
2054
2055void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2056 Register result = ToRegister(instr->result());
2057
2058 // Check for arguments adapter frame.
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002059 NearLabel done, adapted;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002060 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2061 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
2062 __ cmp(Operand(result),
2063 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2064 __ j(equal, &adapted);
2065
2066 // No arguments adaptor frame.
2067 __ mov(result, Operand(ebp));
2068 __ jmp(&done);
2069
2070 // Arguments adaptor frame present.
2071 __ bind(&adapted);
2072 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2073
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002074 // Result is the frame pointer for the frame if not adapted and for the real
2075 // frame below the adaptor frame if adapted.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002076 __ bind(&done);
2077}
2078
2079
2080void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002081 Operand elem = ToOperand(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002082 Register result = ToRegister(instr->result());
2083
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002084 NearLabel done;
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002085
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002086 // If no arguments adaptor frame the number of arguments is fixed.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002087 __ cmp(ebp, elem);
2088 __ mov(result, Immediate(scope()->num_parameters()));
2089 __ j(equal, &done);
2090
2091 // Arguments adaptor frame present. Get argument length from there.
2092 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2093 __ mov(result, Operand(result,
2094 ArgumentsAdaptorFrameConstants::kLengthOffset));
2095 __ SmiUntag(result);
2096
kmillikin@chromium.orgd2c22f02011-01-10 08:15:37 +00002097 // Argument length is in result register.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002098 __ bind(&done);
2099}
2100
2101
2102void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2103 Register receiver = ToRegister(instr->receiver());
2104 ASSERT(ToRegister(instr->function()).is(edi));
2105 ASSERT(ToRegister(instr->result()).is(eax));
2106
2107 // If the receiver is null or undefined, we have to pass the
2108 // global object as a receiver.
2109 NearLabel global_receiver, receiver_ok;
2110 __ cmp(receiver, Factory::null_value());
2111 __ j(equal, &global_receiver);
2112 __ cmp(receiver, Factory::undefined_value());
2113 __ j(not_equal, &receiver_ok);
2114 __ bind(&global_receiver);
2115 __ mov(receiver, GlobalObjectOperand());
2116 __ bind(&receiver_ok);
2117
2118 Register length = ToRegister(instr->length());
2119 Register elements = ToRegister(instr->elements());
2120
2121 Label invoke;
2122
2123 // Copy the arguments to this function possibly from the
2124 // adaptor frame below it.
2125 const uint32_t kArgumentsLimit = 1 * KB;
2126 __ cmp(length, kArgumentsLimit);
2127 DeoptimizeIf(above, instr->environment());
2128
2129 __ push(receiver);
2130 __ mov(receiver, length);
2131
2132 // Loop through the arguments pushing them onto the execution
2133 // stack.
2134 Label loop;
2135 // length is a small non-negative integer, due to the test above.
2136 __ test(length, Operand(length));
2137 __ j(zero, &invoke);
2138 __ bind(&loop);
2139 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2140 __ dec(length);
2141 __ j(not_zero, &loop);
2142
2143 // Invoke the function.
2144 __ bind(&invoke);
kmillikin@chromium.org31b12772011-02-02 16:08:26 +00002145 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2146 LPointerMap* pointers = instr->pointer_map();
2147 LEnvironment* env = instr->deoptimization_environment();
2148 RecordPosition(pointers->position());
2149 RegisterEnvironmentForDeoptimization(env);
2150 SafepointGenerator safepoint_generator(this,
2151 pointers,
2152 env->deoptimization_index());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002153 ASSERT(receiver.is(eax));
2154 v8::internal::ParameterCount actual(eax);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002155 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator);
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002156
2157 // Restore context.
2158 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002159}
2160
2161
2162void LCodeGen::DoPushArgument(LPushArgument* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002163 LOperand* argument = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002164 if (argument->IsConstantOperand()) {
2165 __ push(ToImmediate(argument));
2166 } else {
2167 __ push(ToOperand(argument));
2168 }
2169}
2170
2171
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002172void LCodeGen::DoContext(LContext* instr) {
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002173 Register result = ToRegister(instr->result());
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002174 __ mov(result, esi);
2175}
2176
2177
2178void LCodeGen::DoOuterContext(LOuterContext* instr) {
2179 Register context = ToRegister(instr->context());
2180 Register result = ToRegister(instr->result());
2181 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2182 __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2183}
2184
2185
2186void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2187 Register context = ToRegister(instr->context());
2188 Register result = ToRegister(instr->result());
2189 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002190}
2191
2192
2193void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002194 Register global = ToRegister(instr->global());
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002195 Register result = ToRegister(instr->result());
ricow@chromium.org83aa5492011-02-07 12:42:56 +00002196 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002197}
2198
2199
2200void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2201 int arity,
2202 LInstruction* instr) {
2203 // Change context if needed.
2204 bool change_context =
2205 (graph()->info()->closure()->context() != function->context()) ||
2206 scope()->contains_with() ||
2207 (scope()->num_heap_slots() > 0);
2208 if (change_context) {
2209 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2210 }
2211
2212 // Set eax to arguments count if adaption is not needed. Assumes that eax
2213 // is available to write to at this point.
2214 if (!function->NeedsArgumentsAdaption()) {
2215 __ mov(eax, arity);
2216 }
2217
2218 LPointerMap* pointers = instr->pointer_map();
2219 RecordPosition(pointers->position());
2220
2221 // Invoke function.
2222 if (*function == *graph()->info()->closure()) {
2223 __ CallSelf();
2224 } else {
2225 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2226 }
2227
2228 // Setup deoptimization.
2229 RegisterLazyDeoptimization(instr);
2230
2231 // Restore context.
2232 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2233}
2234
2235
2236void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2237 ASSERT(ToRegister(instr->result()).is(eax));
2238 __ mov(edi, instr->function());
2239 CallKnownFunction(instr->function(), instr->arity(), instr);
2240}
2241
2242
2243void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002244 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002245 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2246 Factory::heap_number_map());
2247 DeoptimizeIf(not_equal, instr->environment());
2248
2249 Label done;
2250 Register tmp = input_reg.is(eax) ? ecx : eax;
2251 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2252
2253 // Preserve the value of all registers.
2254 __ PushSafepointRegisters();
2255
2256 Label negative;
2257 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002258 // Check the sign of the argument. If the argument is positive, just
2259 // return it. We do not need to patch the stack since |input| and
2260 // |result| are the same register and |input| will be restored
2261 // unchanged by popping safepoint registers.
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002262 __ test(tmp, Immediate(HeapNumber::kSignMask));
2263 __ j(not_zero, &negative);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002264 __ jmp(&done);
2265
2266 __ bind(&negative);
2267
2268 Label allocated, slow;
2269 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2270 __ jmp(&allocated);
2271
2272 // Slow case: Call the runtime system to do the number allocation.
2273 __ bind(&slow);
2274
2275 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2276 RecordSafepointWithRegisters(
2277 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2278 // Set the pointer to the new heap number in tmp.
2279 if (!tmp.is(eax)) __ mov(tmp, eax);
2280
2281 // Restore input_reg after call to runtime.
2282 __ mov(input_reg, Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize));
2283
2284 __ bind(&allocated);
2285 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2286 __ and_(tmp2, ~HeapNumber::kSignMask);
2287 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2288 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2289 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002290 __ mov(Operand(esp, EspIndexForPushAll(input_reg) * kPointerSize), tmp);
2291
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002292 __ bind(&done);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002293 __ PopSafepointRegisters();
2294}
2295
2296
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002297void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2298 Register input_reg = ToRegister(instr->InputAt(0));
2299 __ test(input_reg, Operand(input_reg));
2300 Label is_positive;
2301 __ j(not_sign, &is_positive);
2302 __ neg(input_reg);
2303 __ test(input_reg, Operand(input_reg));
2304 DeoptimizeIf(negative, instr->environment());
2305 __ bind(&is_positive);
2306}
2307
2308
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002309void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2310 // Class for deferred case.
2311 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2312 public:
2313 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2314 LUnaryMathOperation* instr)
2315 : LDeferredCode(codegen), instr_(instr) { }
2316 virtual void Generate() {
2317 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2318 }
2319 private:
2320 LUnaryMathOperation* instr_;
2321 };
2322
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002323 ASSERT(instr->InputAt(0)->Equals(instr->result()));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002324 Representation r = instr->hydrogen()->value()->representation();
2325
2326 if (r.IsDouble()) {
2327 XMMRegister scratch = xmm0;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002328 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002329 __ pxor(scratch, scratch);
2330 __ subsd(scratch, input_reg);
2331 __ pand(input_reg, scratch);
2332 } else if (r.IsInteger32()) {
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002333 EmitIntegerMathAbs(instr);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002334 } else { // Tagged case.
2335 DeferredMathAbsTaggedHeapNumber* deferred =
2336 new DeferredMathAbsTaggedHeapNumber(this, instr);
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002337 Register input_reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002338 // Smi check.
2339 __ test(input_reg, Immediate(kSmiTagMask));
2340 __ j(not_zero, deferred->entry());
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002341 EmitIntegerMathAbs(instr);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002342 __ bind(deferred->exit());
2343 }
2344}
2345
2346
2347void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2348 XMMRegister xmm_scratch = xmm0;
2349 Register output_reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002350 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002351 __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2352 __ ucomisd(input_reg, xmm_scratch);
2353
2354 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2355 DeoptimizeIf(below_equal, instr->environment());
2356 } else {
2357 DeoptimizeIf(below, instr->environment());
2358 }
2359
2360 // Use truncating instruction (OK because input is positive).
2361 __ cvttsd2si(output_reg, Operand(input_reg));
2362
2363 // Overflow is signalled with minint.
2364 __ cmp(output_reg, 0x80000000u);
2365 DeoptimizeIf(equal, instr->environment());
2366}
2367
2368
2369void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2370 XMMRegister xmm_scratch = xmm0;
2371 Register output_reg = ToRegister(instr->result());
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002372 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002373
2374 // xmm_scratch = 0.5
2375 ExternalReference one_half = ExternalReference::address_of_one_half();
2376 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2377
2378 // input = input + 0.5
2379 __ addsd(input_reg, xmm_scratch);
2380
2381 // We need to return -0 for the input range [-0.5, 0[, otherwise
2382 // compute Math.floor(value + 0.5).
2383 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2384 __ ucomisd(input_reg, xmm_scratch);
2385 DeoptimizeIf(below_equal, instr->environment());
2386 } else {
2387 // If we don't need to bailout on -0, we check only bailout
2388 // on negative inputs.
2389 __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2390 __ ucomisd(input_reg, xmm_scratch);
2391 DeoptimizeIf(below, instr->environment());
2392 }
2393
2394 // Compute Math.floor(value + 0.5).
2395 // Use truncating instruction (OK because input is positive).
2396 __ cvttsd2si(output_reg, Operand(input_reg));
2397
2398 // Overflow is signalled with minint.
2399 __ cmp(output_reg, 0x80000000u);
2400 DeoptimizeIf(equal, instr->environment());
2401}
2402
2403
2404void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002405 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002406 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2407 __ sqrtsd(input_reg, input_reg);
2408}
2409
2410
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002411void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2412 XMMRegister xmm_scratch = xmm0;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002413 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002414 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2415 ExternalReference negative_infinity =
2416 ExternalReference::address_of_negative_infinity();
2417 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity));
2418 __ ucomisd(xmm_scratch, input_reg);
2419 DeoptimizeIf(equal, instr->environment());
kmillikin@chromium.org31b12772011-02-02 16:08:26 +00002420 __ xorpd(xmm_scratch, xmm_scratch);
2421 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002422 __ sqrtsd(input_reg, input_reg);
2423}
2424
2425
2426void LCodeGen::DoPower(LPower* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002427 LOperand* left = instr->InputAt(0);
2428 LOperand* right = instr->InputAt(1);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002429 DoubleRegister result_reg = ToDoubleRegister(instr->result());
2430 Representation exponent_type = instr->hydrogen()->right()->representation();
2431 if (exponent_type.IsDouble()) {
2432 // It is safe to use ebx directly since the instruction is marked
2433 // as a call.
2434 __ PrepareCallCFunction(4, ebx);
2435 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2436 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
2437 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2438 } else if (exponent_type.IsInteger32()) {
2439 // It is safe to use ebx directly since the instruction is marked
2440 // as a call.
2441 ASSERT(!ToRegister(right).is(ebx));
2442 __ PrepareCallCFunction(4, ebx);
2443 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2444 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
2445 __ CallCFunction(ExternalReference::power_double_int_function(), 4);
2446 } else {
2447 ASSERT(exponent_type.IsTagged());
2448 CpuFeatures::Scope scope(SSE2);
2449 Register right_reg = ToRegister(right);
2450
2451 Label non_smi, call;
2452 __ test(right_reg, Immediate(kSmiTagMask));
2453 __ j(not_zero, &non_smi);
2454 __ SmiUntag(right_reg);
2455 __ cvtsi2sd(result_reg, Operand(right_reg));
2456 __ jmp(&call);
2457
2458 __ bind(&non_smi);
2459 // It is safe to use ebx directly since the instruction is marked
2460 // as a call.
2461 ASSERT(!right_reg.is(ebx));
2462 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , ebx);
2463 DeoptimizeIf(not_equal, instr->environment());
2464 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
2465
2466 __ bind(&call);
2467 __ PrepareCallCFunction(4, ebx);
2468 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2469 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
2470 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
2471 }
2472
2473 // Return value is in st(0) on ia32.
2474 // Store it into the (fixed) result register.
2475 __ sub(Operand(esp), Immediate(kDoubleSize));
2476 __ fstp_d(Operand(esp, 0));
2477 __ movdbl(result_reg, Operand(esp, 0));
2478 __ add(Operand(esp), Immediate(kDoubleSize));
2479}
2480
2481
2482void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2483 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
whesse@chromium.org023421e2010-12-21 12:19:12 +00002484 TranscendentalCacheStub stub(TranscendentalCache::LOG,
2485 TranscendentalCacheStub::UNTAGGED);
2486 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2487}
2488
2489
2490void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2491 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2492 TranscendentalCacheStub stub(TranscendentalCache::COS,
2493 TranscendentalCacheStub::UNTAGGED);
2494 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2495}
2496
2497
2498void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2499 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2500 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2501 TranscendentalCacheStub::UNTAGGED);
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002502 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2503}
2504
2505
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002506void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2507 switch (instr->op()) {
2508 case kMathAbs:
2509 DoMathAbs(instr);
2510 break;
2511 case kMathFloor:
2512 DoMathFloor(instr);
2513 break;
2514 case kMathRound:
2515 DoMathRound(instr);
2516 break;
2517 case kMathSqrt:
2518 DoMathSqrt(instr);
2519 break;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002520 case kMathPowHalf:
2521 DoMathPowHalf(instr);
2522 break;
whesse@chromium.org023421e2010-12-21 12:19:12 +00002523 case kMathCos:
2524 DoMathCos(instr);
2525 break;
2526 case kMathSin:
2527 DoMathSin(instr);
2528 break;
ager@chromium.org5f0c45f2010-12-17 08:51:21 +00002529 case kMathLog:
2530 DoMathLog(instr);
2531 break;
2532
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002533 default:
2534 UNREACHABLE();
2535 }
2536}
2537
2538
2539void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2540 ASSERT(ToRegister(instr->result()).is(eax));
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002541 ASSERT(ToRegister(instr->InputAt(0)).is(ecx));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002542
2543 int arity = instr->arity();
2544 Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2545 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2546 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2547}
2548
2549
2550void LCodeGen::DoCallNamed(LCallNamed* instr) {
2551 ASSERT(ToRegister(instr->result()).is(eax));
2552
2553 int arity = instr->arity();
2554 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2555 __ mov(ecx, instr->name());
2556 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2557 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2558}
2559
2560
2561void LCodeGen::DoCallFunction(LCallFunction* instr) {
2562 ASSERT(ToRegister(instr->result()).is(eax));
2563
2564 int arity = instr->arity();
2565 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2566 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2567 __ Drop(1);
2568 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2569}
2570
2571
2572void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2573 ASSERT(ToRegister(instr->result()).is(eax));
2574
2575 int arity = instr->arity();
2576 Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
2577 __ mov(ecx, instr->name());
2578 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2579 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2580}
2581
2582
2583void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2584 ASSERT(ToRegister(instr->result()).is(eax));
2585 __ mov(edi, instr->target());
2586 CallKnownFunction(instr->target(), instr->arity(), instr);
2587}
2588
2589
2590void LCodeGen::DoCallNew(LCallNew* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002591 ASSERT(ToRegister(instr->InputAt(0)).is(edi));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002592 ASSERT(ToRegister(instr->result()).is(eax));
2593
2594 Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
2595 __ Set(eax, Immediate(instr->arity()));
2596 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2597}
2598
2599
2600void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2601 CallRuntime(instr->function(), instr->arity(), instr);
2602}
2603
2604
2605void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2606 Register object = ToRegister(instr->object());
2607 Register value = ToRegister(instr->value());
2608 int offset = instr->offset();
2609
2610 if (!instr->transition().is_null()) {
2611 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
2612 }
2613
2614 // Do the store.
2615 if (instr->is_in_object()) {
2616 __ mov(FieldOperand(object, offset), value);
2617 if (instr->needs_write_barrier()) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002618 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002619 // Update the write barrier for the object for in-object properties.
2620 __ RecordWrite(object, offset, value, temp);
2621 }
2622 } else {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002623 Register temp = ToRegister(instr->TempAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002624 __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
2625 __ mov(FieldOperand(temp, offset), value);
2626 if (instr->needs_write_barrier()) {
2627 // Update the write barrier for the properties array.
2628 // object is used as a scratch register.
2629 __ RecordWrite(temp, offset, value, object);
2630 }
2631 }
2632}
2633
2634
2635void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2636 ASSERT(ToRegister(instr->object()).is(edx));
2637 ASSERT(ToRegister(instr->value()).is(eax));
2638
2639 __ mov(ecx, instr->name());
2640 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
2641 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2642}
2643
2644
2645void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2646 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2647 DeoptimizeIf(above_equal, instr->environment());
2648}
2649
2650
2651void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
2652 Register value = ToRegister(instr->value());
2653 Register elements = ToRegister(instr->object());
2654 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
2655
2656 // Do the store.
2657 if (instr->key()->IsConstantOperand()) {
2658 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
2659 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
2660 int offset =
2661 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
2662 __ mov(FieldOperand(elements, offset), value);
2663 } else {
2664 __ mov(FieldOperand(elements, key, times_4, FixedArray::kHeaderSize),
2665 value);
2666 }
2667
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002668 if (instr->hydrogen()->NeedsWriteBarrier()) {
2669 // Compute address of modified element and store it into key register.
2670 __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize));
2671 __ RecordWrite(elements, key, value);
2672 }
2673}
2674
2675
2676void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
2677 ASSERT(ToRegister(instr->object()).is(edx));
2678 ASSERT(ToRegister(instr->key()).is(ecx));
2679 ASSERT(ToRegister(instr->value()).is(eax));
2680
2681 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
2682 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2683}
2684
2685
vegorov@chromium.org0a4e9012011-01-24 12:33:13 +00002686void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
2687 class DeferredStringCharCodeAt: public LDeferredCode {
2688 public:
2689 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
2690 : LDeferredCode(codegen), instr_(instr) { }
2691 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
2692 private:
2693 LStringCharCodeAt* instr_;
2694 };
2695
2696 Register string = ToRegister(instr->string());
2697 Register index = no_reg;
2698 int const_index = -1;
2699 if (instr->index()->IsConstantOperand()) {
2700 const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2701 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2702 if (!Smi::IsValid(const_index)) {
2703 // Guaranteed to be out of bounds because of the assert above.
2704 // So the bounds check that must dominate this instruction must
2705 // have deoptimized already.
2706 if (FLAG_debug_code) {
2707 __ Abort("StringCharCodeAt: out of bounds index.");
2708 }
2709 // No code needs to be generated.
2710 return;
2711 }
2712 } else {
2713 index = ToRegister(instr->index());
2714 }
2715 Register result = ToRegister(instr->result());
2716
2717 DeferredStringCharCodeAt* deferred =
2718 new DeferredStringCharCodeAt(this, instr);
2719
2720 NearLabel flat_string, ascii_string, done;
2721
2722 // Fetch the instance type of the receiver into result register.
2723 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2724 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2725
2726 // We need special handling for non-flat strings.
2727 STATIC_ASSERT(kSeqStringTag == 0);
2728 __ test(result, Immediate(kStringRepresentationMask));
2729 __ j(zero, &flat_string);
2730
2731 // Handle non-flat strings.
2732 __ test(result, Immediate(kIsConsStringMask));
2733 __ j(zero, deferred->entry());
2734
2735 // ConsString.
2736 // Check whether the right hand side is the empty string (i.e. if
2737 // this is really a flat string in a cons string). If that is not
2738 // the case we would rather go to the runtime system now to flatten
2739 // the string.
2740 __ cmp(FieldOperand(string, ConsString::kSecondOffset),
2741 Immediate(Factory::empty_string()));
2742 __ j(not_equal, deferred->entry());
2743 // Get the first of the two strings and load its instance type.
2744 __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
2745 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
2746 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
2747 // If the first cons component is also non-flat, then go to runtime.
2748 STATIC_ASSERT(kSeqStringTag == 0);
2749 __ test(result, Immediate(kStringRepresentationMask));
2750 __ j(not_zero, deferred->entry());
2751
2752 // Check for 1-byte or 2-byte string.
2753 __ bind(&flat_string);
2754 STATIC_ASSERT(kAsciiStringTag != 0);
2755 __ test(result, Immediate(kStringEncodingMask));
2756 __ j(not_zero, &ascii_string);
2757
2758 // 2-byte string.
2759 // Load the 2-byte character code into the result register.
2760 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2761 if (instr->index()->IsConstantOperand()) {
2762 __ movzx_w(result,
2763 FieldOperand(string,
2764 SeqTwoByteString::kHeaderSize + 2 * const_index));
2765 } else {
2766 __ movzx_w(result, FieldOperand(string,
2767 index,
2768 times_2,
2769 SeqTwoByteString::kHeaderSize));
2770 }
2771 __ jmp(&done);
2772
2773 // ASCII string.
2774 // Load the byte into the result register.
2775 __ bind(&ascii_string);
2776 if (instr->index()->IsConstantOperand()) {
2777 __ movzx_b(result, FieldOperand(string,
2778 SeqAsciiString::kHeaderSize + const_index));
2779 } else {
2780 __ movzx_b(result, FieldOperand(string,
2781 index,
2782 times_1,
2783 SeqAsciiString::kHeaderSize));
2784 }
2785 __ bind(&done);
2786 __ bind(deferred->exit());
2787}
2788
2789
2790void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
2791 Register string = ToRegister(instr->string());
2792 Register result = ToRegister(instr->result());
2793
2794 // TODO(3095996): Get rid of this. For now, we need to make the
2795 // result register contain a valid pointer because it is already
2796 // contained in the register pointer map.
2797 __ Set(result, Immediate(0));
2798
2799 __ PushSafepointRegisters();
2800 __ push(string);
2801 // Push the index as a smi. This is safe because of the checks in
2802 // DoStringCharCodeAt above.
2803 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
2804 if (instr->index()->IsConstantOperand()) {
2805 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2806 __ push(Immediate(Smi::FromInt(const_index)));
2807 } else {
2808 Register index = ToRegister(instr->index());
2809 __ SmiTag(index);
2810 __ push(index);
2811 }
2812 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
2813 RecordSafepointWithRegisters(
2814 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
2815 if (FLAG_debug_code) {
2816 __ AbortIfNotSmi(eax);
2817 }
2818 __ SmiUntag(eax);
2819 __ mov(Operand(esp, EspIndexForPushAll(result) * kPointerSize), eax);
2820 __ PopSafepointRegisters();
2821}
2822
2823
2824void LCodeGen::DoStringLength(LStringLength* instr) {
2825 Register string = ToRegister(instr->string());
2826 Register result = ToRegister(instr->result());
2827 __ mov(result, FieldOperand(string, String::kLengthOffset));
2828}
2829
2830
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002831void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002832 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002833 ASSERT(input->IsRegister() || input->IsStackSlot());
2834 LOperand* output = instr->result();
2835 ASSERT(output->IsDoubleRegister());
2836 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
2837}
2838
2839
2840void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
2841 class DeferredNumberTagI: public LDeferredCode {
2842 public:
2843 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
2844 : LDeferredCode(codegen), instr_(instr) { }
2845 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
2846 private:
2847 LNumberTagI* instr_;
2848 };
2849
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002850 LOperand* input = instr->InputAt(0);
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002851 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2852 Register reg = ToRegister(input);
2853
2854 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
2855 __ SmiTag(reg);
2856 __ j(overflow, deferred->entry());
2857 __ bind(deferred->exit());
2858}
2859
2860
2861void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
2862 Label slow;
erik.corry@gmail.com0511e242011-01-19 11:11:08 +00002863 Register reg = ToRegister(instr->InputAt(0));
kasperl@chromium.orga5551262010-12-07 12:49:48 +00002864 Register tmp = reg.is(eax) ? ecx : eax;
2865
2866 // Preserve the value of all registers.
2867 __ PushSafepointRegisters();
2868
2869 // There was overflow, so bits 30 and 31 of the original integer
2870 // disagree. Try to allocate a heap number in new space and store
2871 // the value in there. If that fails, call the runtime system.
2872 NearLabel done;
2873 __ SmiUntag(reg);
2874 __ xor_(reg, 0x80000000);
2875 __ cvtsi2sd(xmm0, Operand(reg));
2876 if (FLAG_inline_new) {
2877 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
2878 __ jmp(&done);
2879 }
2880
2881 // Slow case: Call the runtime system to do the number allocation.
2882 __ bind(&slow);
2883
2884 // TODO(3095996): Put a valid pointer value in the stack slot where the result
2885 // register is stored, as this register is in the pointer map, but contains an
2886 // integer value.
2887 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));
2888
2889 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2890 RecordSafepointWithRegisters(
2891 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2892 if (!reg.is(eax)) __ mov(reg, eax);
2893
2894 // Done. Put the value in xmm0 into the value of the allocated heap
2895 // number.
2896 __ bind(&done);
2897 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
2898 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), reg);
2899 __ PopSafepointRegisters();
2900}
2901
2902
2903void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
2904 class DeferredNumberTagD: public LDeferredCode {
2905 public:
2906 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
2907 : LDeferredCode(codegen), instr_(instr) { }
2908 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
2909 private:
2910 LNumberTagD* instr_;
2911 };
2912
2913 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2914 Register reg = ToRegister(instr->result());
2915 Register tmp = ToRegister(instr->TempAt(0));
2916
2917 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
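  // Try to allocate the heap number inline; if inline allocation is disabled
  // or new-space allocation fails, the deferred code calls the runtime.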
2918 if (FLAG_inline_new) {
2919 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
2920 } else {
2921 __ jmp(deferred->entry());
2922 }
2923 __ bind(deferred->exit());
2924 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
2925}
2926
2927
2928void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
2929 // TODO(3095996): Get rid of this. For now, we need to make the
2930 // result register contain a valid pointer because it is already
2931 // contained in the register pointer map.
2932 Register reg = ToRegister(instr->result());
2933 __ Set(reg, Immediate(0));
2934
2935 __ PushSafepointRegisters();
2936 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2937 RecordSafepointWithRegisters(
2938 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2939 __ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), eax);
2940 __ PopSafepointRegisters();
2941}
2942
2943
2944void LCodeGen::DoSmiTag(LSmiTag* instr) {
2945 LOperand* input = instr->InputAt(0);
2946 ASSERT(input->IsRegister() && input->Equals(instr->result()));
2947 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
2948 __ SmiTag(ToRegister(input));
2949}
2950
2951
2952void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
2953 LOperand* input = instr->InputAt(0);
2954 ASSERT(input->IsRegister() && input->Equals(instr->result()));
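  // If the value is not statically known to be a smi, deoptimize unless the
  // smi tag is present.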
2955 if (instr->needs_check()) {
2956 __ test(ToRegister(input), Immediate(kSmiTagMask));
2957 DeoptimizeIf(not_zero, instr->environment());
2958 }
2959 __ SmiUntag(ToRegister(input));
2960}
2961
2962
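// Converts the tagged value in input_reg (a smi or a heap number) into a
// double in result_reg. Undefined is converted to NaN; any other non-number
// input deoptimizes through env.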
2963void LCodeGen::EmitNumberUntagD(Register input_reg,
2964 XMMRegister result_reg,
2965 LEnvironment* env) {
2966 NearLabel load_smi, heap_number, done;
2967
2968 // Smi check.
2969 __ test(input_reg, Immediate(kSmiTagMask));
2970 __ j(zero, &load_smi, not_taken);
2971
2972 // Heap number map check.
2973 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2974 Factory::heap_number_map());
2975 __ j(equal, &heap_number);
2976
2977 __ cmp(input_reg, Factory::undefined_value());
2978 DeoptimizeIf(not_equal, env);
2979
2980 // Convert undefined to NaN.
2981 __ push(input_reg);
2982 __ mov(input_reg, Factory::nan_value());
2983 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
2984 __ pop(input_reg);
2985 __ jmp(&done);
2986
2987 // Heap number to XMM conversion.
2988 __ bind(&heap_number);
2989 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
2990 __ jmp(&done);
2991
2992 // Smi to XMM conversion
2993 __ bind(&load_smi);
2994 __ SmiUntag(input_reg); // Untag smi before converting to double.
2995 __ cvtsi2sd(result_reg, Operand(input_reg));
2996 __ SmiTag(input_reg); // Retag smi.
2997 __ bind(&done);
2998}
2999
3000
3001class DeferredTaggedToI: public LDeferredCode {
3002 public:
3003 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3004 : LDeferredCode(codegen), instr_(instr) { }
3005 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3006 private:
3007 LTaggedToI* instr_;
3008};
3009
3010
3011void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3012 NearLabel done, heap_number;
3013 Register input_reg = ToRegister(instr->InputAt(0));
3014
3015 // Heap number map check.
3016 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3017 Factory::heap_number_map());
3018
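  // Truncating conversions (as used by the bitwise operators) accept heap
  // numbers and undefined; any other input deoptimizes.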
3019 if (instr->truncating()) {
3020 __ j(equal, &heap_number);
3021 // Check for undefined. Undefined is converted to zero for truncating
3022 // conversions.
3023 __ cmp(input_reg, Factory::undefined_value());
3024 DeoptimizeIf(not_equal, instr->environment());
3025 __ mov(input_reg, 0);
3026 __ jmp(&done);
3027
3028 __ bind(&heap_number);
3029 if (CpuFeatures::IsSupported(SSE3)) {
3030 CpuFeatures::Scope scope(SSE3);
3031 NearLabel convert;
3032 // Use more powerful conversion when sse3 is available.
3033 // Load x87 register with heap number.
3034 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
3035 // Get exponent alone and check for too-big exponent.
3036 __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3037 __ and_(input_reg, HeapNumber::kExponentMask);
3038 const uint32_t kTooBigExponent =
3039 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3040 __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
3041 __ j(less, &convert);
3042 // Pop FPU stack before deoptimizing.
3043 __ ffree(0);
3044 __ fincstp();
3045 DeoptimizeIf(no_condition, instr->environment());
3046
3047 // Reserve space for 64 bit answer.
3048 __ bind(&convert);
3049 __ sub(Operand(esp), Immediate(kDoubleSize));
3050 // Do conversion, which cannot fail because we checked the exponent.
3051 __ fisttp_d(Operand(esp, 0));
3052 __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3053 __ add(Operand(esp), Immediate(kDoubleSize));
3054 } else {
3055 NearLabel deopt;
3056 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3057 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3058 __ cvttsd2si(input_reg, Operand(xmm0));
3059 __ cmp(input_reg, 0x80000000u);
3060 __ j(not_equal, &done);
3061 // Check if the input was 0x80000000 (kMinInt).
3062 // If not, the conversion overflowed and we deoptimize.
3063 ExternalReference min_int = ExternalReference::address_of_min_int();
3064 __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
3065 __ ucomisd(xmm_temp, xmm0);
3066 DeoptimizeIf(not_equal, instr->environment());
3067 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3068 }
3069 } else {
3070 // Deoptimize if we don't have a heap number.
3071 DeoptimizeIf(not_equal, instr->environment());
3072
3073 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3074 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3075 __ cvttsd2si(input_reg, Operand(xmm0));
3076 __ cvtsi2sd(xmm_temp, Operand(input_reg));
3077 __ ucomisd(xmm0, xmm_temp);
3078 DeoptimizeIf(not_equal, instr->environment());
3079 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3080 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3081 __ test(input_reg, Operand(input_reg));
3082 __ j(not_zero, &done);
3083 __ movmskpd(input_reg, xmm0);
3084 __ and_(input_reg, 1);
3085 DeoptimizeIf(not_zero, instr->environment());
3086 }
3087 }
3088 __ bind(&done);
3089}
3090
3091
3092void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3093 LOperand* input = instr->InputAt(0);
3094 ASSERT(input->IsRegister());
3095 ASSERT(input->Equals(instr->result()));
3096
3097 Register input_reg = ToRegister(input);
3098
3099 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3100
3101 // Smi check.
3102 __ test(input_reg, Immediate(kSmiTagMask));
3103 __ j(not_zero, deferred->entry());
3104
3105 // Smi to int32 conversion
3106 __ SmiUntag(input_reg); // Untag smi.
3107
3108 __ bind(deferred->exit());
3109}
3110
3111
3112void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3113 LOperand* input = instr->InputAt(0);
3114 ASSERT(input->IsRegister());
3115 LOperand* result = instr->result();
3116 ASSERT(result->IsDoubleRegister());
3117
3118 Register input_reg = ToRegister(input);
3119 XMMRegister result_reg = ToDoubleRegister(result);
3120
3121 EmitNumberUntagD(input_reg, result_reg, instr->environment());
3122}
3123
3124
3125void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3126 LOperand* input = instr->InputAt(0);
3127 ASSERT(input->IsDoubleRegister());
3128 LOperand* result = instr->result();
3129 ASSERT(result->IsRegister());
3130
3131 XMMRegister input_reg = ToDoubleRegister(input);
3132 Register result_reg = ToRegister(result);
3133
3134 if (instr->truncating()) {
3135 // Performs a truncating conversion of a floating point number as used by
3136 // the JS bitwise operations.
3137 __ cvttsd2si(result_reg, Operand(input_reg));
3138 __ cmp(result_reg, 0x80000000u);
3139 if (CpuFeatures::IsSupported(SSE3)) {
3140 // This will deoptimize if the exponent of the input is out of range.
3141 CpuFeatures::Scope scope(SSE3);
3142 NearLabel convert, done;
3143 __ j(not_equal, &done);
3144 __ sub(Operand(esp), Immediate(kDoubleSize));
3145 __ movdbl(Operand(esp, 0), input_reg);
3146 // Get exponent alone and check for too-big exponent.
3147 __ mov(result_reg, Operand(esp, sizeof(int32_t)));
3148 __ and_(result_reg, HeapNumber::kExponentMask);
3149 const uint32_t kTooBigExponent =
3150 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3151 __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
3152 __ j(less, &convert);
3153 __ add(Operand(esp), Immediate(kDoubleSize));
3154 DeoptimizeIf(no_condition, instr->environment());
3155 __ bind(&convert);
3156 // Do conversion, which cannot fail because we checked the exponent.
3157 __ fld_d(Operand(esp, 0));
3158 __ fisttp_d(Operand(esp, 0));
3159 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
3160 __ add(Operand(esp), Immediate(kDoubleSize));
3161 __ bind(&done);
3162 } else {
3163 NearLabel done;
3164 Register temp_reg = ToRegister(instr->TempAt(0));
3165 XMMRegister xmm_scratch = xmm0;
3166
3167 // If cvttsd2si succeeded, we're done. Otherwise, we attempt
3168 // manual conversion.
3169 __ j(not_equal, &done);
3170
3171 // Get high 32 bits of the input in result_reg and temp_reg.
3172 __ pshufd(xmm_scratch, input_reg, 1);
3173 __ movd(Operand(temp_reg), xmm_scratch);
3174 __ mov(result_reg, temp_reg);
3175
3176 // Prepare negation mask in temp_reg.
3177 __ sar(temp_reg, kBitsPerInt - 1);
3178
3179 // Extract the exponent from result_reg and subtract adjusted
3180 // bias from it. The adjustment is selected in a way such that
3181 // when the difference is zero, the answer is in the low 32 bits
3182 // of the input, otherwise a shift has to be performed.
3183 __ shr(result_reg, HeapNumber::kExponentShift);
3184 __ and_(result_reg,
3185 HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
3186 __ sub(Operand(result_reg),
3187 Immediate(HeapNumber::kExponentBias +
3188 HeapNumber::kExponentBits +
3189 HeapNumber::kMantissaBits));
3190 // Don't handle big (> kMantissaBits + kExponentBits == 63) or
3191 // special exponents.
3192 DeoptimizeIf(greater, instr->environment());
3193
3194 // Zero out the sign and the exponent in the input (by shifting
3195 // it to the left) and restore the implicit mantissa bit,
3196 // i.e. convert the input to unsigned int64 shifted left by
3197 // kExponentBits.
3198 ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
3199 // Minus zero has the most significant bit set and the other
3200 // bits cleared.
3201 __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
3202 __ psllq(input_reg, HeapNumber::kExponentBits);
3203 __ por(input_reg, xmm_scratch);
3204
3205 // Get the amount to shift the input right in xmm_scratch.
3206 __ neg(result_reg);
3207 __ movd(xmm_scratch, Operand(result_reg));
3208
3209 // Shift the input right and extract low 32 bits.
3210 __ psrlq(input_reg, xmm_scratch);
3211 __ movd(Operand(result_reg), input_reg);
3212
3213 // Use the prepared mask in temp_reg to negate the result if necessary.
3214 __ xor_(result_reg, Operand(temp_reg));
3215 __ sub(result_reg, Operand(temp_reg));
3216 __ bind(&done);
3217 }
3218 } else {
3219 NearLabel done;
3220 __ cvttsd2si(result_reg, Operand(input_reg));
3221 __ cvtsi2sd(xmm0, Operand(result_reg));
3222 __ ucomisd(xmm0, input_reg);
3223 DeoptimizeIf(not_equal, instr->environment());
3224 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3225 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3226 // The integer converted back is equal to the original. We
3227 // only have to test if we got -0 as an input.
3228 __ test(result_reg, Operand(result_reg));
3229 __ j(not_zero, &done);
3230 __ movmskpd(result_reg, input_reg);
3231 // Bit 0 contains the sign of the double in input_reg.
3232 // If input was positive, we are ok and return 0, otherwise
3233 // deoptimize.
3234 __ and_(result_reg, 1);
3235 DeoptimizeIf(not_zero, instr->environment());
3236 }
3237 __ bind(&done);
3238 }
3239}
3240
3241
3242void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
3243 LOperand* input = instr->InputAt(0);
3244 ASSERT(input->IsRegister());
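  // The test sets the zero flag when the value is a smi; the condition on the
  // instruction decides whether a smi or a non-smi triggers deoptimization.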
3245 __ test(ToRegister(input), Immediate(kSmiTagMask));
3246 DeoptimizeIf(instr->condition(), instr->environment());
3247}
3248
3249
3250void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
3251 Register input = ToRegister(instr->InputAt(0));
3252 Register temp = ToRegister(instr->TempAt(0));
3253 InstanceType first = instr->hydrogen()->first();
3254 InstanceType last = instr->hydrogen()->last();
3255
3256 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
3257
3258 // If there is only one type in the interval check for equality.
3259 if (first == last) {
3260 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3261 static_cast<int8_t>(first));
3262 DeoptimizeIf(not_equal, instr->environment());
3263 } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
3264 // String has a dedicated bit in instance type.
3265 __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask);
3266 DeoptimizeIf(not_zero, instr->environment());
3267 } else {
3268 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3269 static_cast<int8_t>(first));
3270 DeoptimizeIf(below, instr->environment());
3271 // Omit check for the last type.
3272 if (last != LAST_TYPE) {
3273 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3274 static_cast<int8_t>(last));
3275 DeoptimizeIf(above, instr->environment());
3276 }
3277 }
3278}
3279
3280
3281void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
3282 ASSERT(instr->InputAt(0)->IsRegister());
3283 Register reg = ToRegister(instr->InputAt(0));
3284 __ cmp(reg, instr->hydrogen()->target());
3285 DeoptimizeIf(not_equal, instr->environment());
3286}
3287
3288
3289void LCodeGen::DoCheckMap(LCheckMap* instr) {
3290 LOperand* input = instr->InputAt(0);
3291 ASSERT(input->IsRegister());
3292 Register reg = ToRegister(input);
3293 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3294 instr->hydrogen()->map());
3295 DeoptimizeIf(not_equal, instr->environment());
3296}
3297
3298
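// Loads the given heap object into result. Objects in new space can move
// during a scavenge, so they are referenced indirectly through a global
// property cell rather than embedded directly in the generated code.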
3299void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
3300 if (Heap::InNewSpace(*object)) {
3301 Handle<JSGlobalPropertyCell> cell =
3302 Factory::NewJSGlobalPropertyCell(object);
3303 __ mov(result, Operand::Cell(cell));
3304 } else {
3305 __ mov(result, object);
3306 }
3307}
3308
3309
3310void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
3311 Register reg = ToRegister(instr->TempAt(0));
3312
3313 Handle<JSObject> holder = instr->holder();
3314 Handle<JSObject> current_prototype = instr->prototype();
3315
3316 // Load prototype object.
3317 LoadHeapObject(reg, current_prototype);
3318
3319 // Check prototype maps up to the holder.
3320 while (!current_prototype.is_identical_to(holder)) {
3321 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3322 Handle<Map>(current_prototype->map()));
3323 DeoptimizeIf(not_equal, instr->environment());
3324 current_prototype =
3325 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3326 // Load next prototype object.
3327 LoadHeapObject(reg, current_prototype);
3328 }
3329
3330 // Check the holder map.
3331 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3332 Handle<Map>(current_prototype->map()));
3333 DeoptimizeIf(not_equal, instr->environment());
3334}
3335
3336
3337void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
3338 // Set up the parameters to the stub/runtime call.
3339 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3340 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3341 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3342 __ push(Immediate(instr->hydrogen()->constant_elements()));
3343
3344 // Pick the right runtime function or stub to call.
3345 int length = instr->hydrogen()->length();
3346 if (instr->hydrogen()->IsCopyOnWrite()) {
3347 ASSERT(instr->hydrogen()->depth() == 1);
3348 FastCloneShallowArrayStub::Mode mode =
3349 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3350 FastCloneShallowArrayStub stub(mode, length);
3351 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3352 } else if (instr->hydrogen()->depth() > 1) {
3353 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
3354 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3355 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
3356 } else {
3357 FastCloneShallowArrayStub::Mode mode =
3358 FastCloneShallowArrayStub::CLONE_ELEMENTS;
3359 FastCloneShallowArrayStub stub(mode, length);
3360 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3361 }
3362}
3363
3364
3365void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3366 // Set up the parameters to the stub/runtime call.
3367 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3368 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3369 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3370 __ push(Immediate(instr->hydrogen()->constant_properties()));
3371 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
3372
3373 // Pick the right runtime function to call.
3374 if (instr->hydrogen()->depth() > 1) {
3375 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3376 } else {
3377 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3378 }
3379}
3380
3381
3382void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3383 NearLabel materialized;
3384 // Registers will be used as follows:
3385 // edi = JS function.
3386 // ecx = literals array.
3387 // ebx = regexp literal.
3388 // eax = regexp literal clone.
3389 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3390 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3391 int literal_offset = FixedArray::kHeaderSize +
3392 instr->hydrogen()->literal_index() * kPointerSize;
3393 __ mov(ebx, FieldOperand(ecx, literal_offset));
3394 __ cmp(ebx, Factory::undefined_value());
3395 __ j(not_equal, &materialized);
3396
3397 // Create regexp literal using runtime function
3398 // Result will be in eax.
3399 __ push(ecx);
3400 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3401 __ push(Immediate(instr->hydrogen()->pattern()));
3402 __ push(Immediate(instr->hydrogen()->flags()));
3403 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3404 __ mov(ebx, eax);
3405
3406 __ bind(&materialized);
3407 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3408 Label allocated, runtime_allocate;
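  // Allocate space for a copy of the boilerplate regexp in new space; fall
  // back to a runtime allocation if that fails.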
3409 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3410 __ jmp(&allocated);
3411
3412 __ bind(&runtime_allocate);
3413 __ push(ebx);
3414 __ push(Immediate(Smi::FromInt(size)));
3415 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
3416 __ pop(ebx);
3417
3418 __ bind(&allocated);
3419 // Copy the content into the newly allocated memory.
3420 // (Unroll copy loop once for better throughput).
3421 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3422 __ mov(edx, FieldOperand(ebx, i));
3423 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3424 __ mov(FieldOperand(eax, i), edx);
3425 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3426 }
3427 if ((size % (2 * kPointerSize)) != 0) {
3428 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3429 __ mov(FieldOperand(eax, size - kPointerSize), edx);
3430 }
3431}
3432
3433
3434void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3435 // Use the fast case closure allocation code that allocates in new
3436 // space for nested functions that don't need literals cloning.
3437 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3438 bool pretenure = instr->hydrogen()->pretenure();
3439 if (shared_info->num_literals() == 0 && !pretenure) {
3440 FastNewClosureStub stub;
3441 __ push(Immediate(shared_info));
3442 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3443 } else {
3444 __ push(esi);
3445 __ push(Immediate(shared_info));
3446 __ push(Immediate(pretenure
3447 ? Factory::true_value()
3448 : Factory::false_value()));
3449 CallRuntime(Runtime::kNewClosure, 3, instr);
3450 }
3451}
3452
3453
3454void LCodeGen::DoTypeof(LTypeof* instr) {
3455 LOperand* input = instr->InputAt(0);
3456 if (input->IsConstantOperand()) {
3457 __ push(ToImmediate(input));
3458 } else {
3459 __ push(ToOperand(input));
3460 }
3461 CallRuntime(Runtime::kTypeof, 1, instr);
3462}
3463
3464
3465void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3466 Register input = ToRegister(instr->InputAt(0));
3467 Register result = ToRegister(instr->result());
3468 Label true_label;
3469 Label false_label;
3470 NearLabel done;
3471
3472 Condition final_branch_condition = EmitTypeofIs(&true_label,
3473 &false_label,
3474 input,
3475 instr->type_literal());
3476 __ j(final_branch_condition, &true_label);
3477 __ bind(&false_label);
3478 __ mov(result, Factory::false_value());
3479 __ jmp(&done);
3480
3481 __ bind(&true_label);
3482 __ mov(result, Factory::true_value());
3483
3484 __ bind(&done);
3485}
3486
3487
3488void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
3489 Register input = ToRegister(instr->InputAt(0));
3490 int true_block = chunk_->LookupDestination(instr->true_block_id());
3491 int false_block = chunk_->LookupDestination(instr->false_block_id());
3492 Label* true_label = chunk_->GetAssemblyLabel(true_block);
3493 Label* false_label = chunk_->GetAssemblyLabel(false_block);
3494
3495 Condition final_branch_condition = EmitTypeofIs(true_label,
3496 false_label,
3497 input,
3498 instr->type_literal());
3499
3500 EmitBranch(true_block, false_block, final_branch_condition);
3501}
3502
3503
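// Emits code comparing typeof(input) against the given type literal. Cases
// that can be decided immediately jump to true_label or false_label; the
// returned condition selects the true branch for the remaining comparison.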
3504Condition LCodeGen::EmitTypeofIs(Label* true_label,
3505 Label* false_label,
3506 Register input,
3507 Handle<String> type_name) {
3508 Condition final_branch_condition = no_condition;
3509 if (type_name->Equals(Heap::number_symbol())) {
3510 __ test(input, Immediate(kSmiTagMask));
3511 __ j(zero, true_label);
3512 __ cmp(FieldOperand(input, HeapObject::kMapOffset),
3513 Factory::heap_number_map());
3514 final_branch_condition = equal;
3515
3516 } else if (type_name->Equals(Heap::string_symbol())) {
3517 __ test(input, Immediate(kSmiTagMask));
3518 __ j(zero, false_label);
3519 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3520 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3521 1 << Map::kIsUndetectable);
3522 __ j(not_zero, false_label);
3523 __ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
3524 final_branch_condition = below;
3525
3526 } else if (type_name->Equals(Heap::boolean_symbol())) {
3527 __ cmp(input, Factory::true_value());
3528 __ j(equal, true_label);
3529 __ cmp(input, Factory::false_value());
3530 final_branch_condition = equal;
3531
3532 } else if (type_name->Equals(Heap::undefined_symbol())) {
3533 __ cmp(input, Factory::undefined_value());
3534 __ j(equal, true_label);
3535 __ test(input, Immediate(kSmiTagMask));
3536 __ j(zero, false_label);
3537 // Check for undetectable objects => true.
3538 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
3539 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3540 1 << Map::kIsUndetectable);
3541 final_branch_condition = not_zero;
3542
3543 } else if (type_name->Equals(Heap::function_symbol())) {
3544 __ test(input, Immediate(kSmiTagMask));
3545 __ j(zero, false_label);
3546 __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
3547 __ j(equal, true_label);
3548 // Regular expressions => 'function' (they are callable).
3549 __ CmpInstanceType(input, JS_REGEXP_TYPE);
3550 final_branch_condition = equal;
3551
3552 } else if (type_name->Equals(Heap::object_symbol())) {
3553 __ test(input, Immediate(kSmiTagMask));
3554 __ j(zero, false_label);
3555 __ cmp(input, Factory::null_value());
3556 __ j(equal, true_label);
3557 // Regular expressions => 'function', not 'object'.
3558 __ CmpObjectType(input, JS_REGEXP_TYPE, input);
3559 __ j(equal, false_label);
3560 // Check for undetectable objects => false.
3561 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3562 1 << Map::kIsUndetectable);
3563 __ j(not_zero, false_label);
3564 // Check for JS objects => true.
3565 __ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
3566 __ j(below, false_label);
3567 __ CmpInstanceType(input, LAST_JS_OBJECT_TYPE);
3568 final_branch_condition = below_equal;
3569
3570 } else {
3571 final_branch_condition = not_equal;
3572 __ jmp(false_label);
3573 // A dead branch instruction will be generated after this point.
3574 }
3575
3576 return final_branch_condition;
3577}
3578
3579
3580void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
3581 // No code for lazy bailout instruction. Used to capture environment after a
3582 // call for populating the safepoint data with deoptimization data.
3583}
3584
3585
3586void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
3587 DeoptimizeIf(no_condition, instr->environment());
3588}
3589
3590
3591void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
3592 LOperand* obj = instr->object();
3593 LOperand* key = instr->key();
3594 __ push(ToOperand(obj));
3595 if (key->IsConstantOperand()) {
3596 __ push(ToImmediate(key));
3597 } else {
3598 __ push(ToOperand(key));
3599 }
3600 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
3601 LPointerMap* pointers = instr->pointer_map();
3602 LEnvironment* env = instr->deoptimization_environment();
3603 RecordPosition(pointers->position());
3604 RegisterEnvironmentForDeoptimization(env);
3605 SafepointGenerator safepoint_generator(this,
3606 pointers,
3607 env->deoptimization_index());
3608 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
3609}
3610
3611
3612void LCodeGen::DoStackCheck(LStackCheck* instr) {
3613 // Perform stack overflow check.
3614 NearLabel done;
3615 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
3616 __ cmp(esp, Operand::StaticVariable(stack_limit));
3617 __ j(above_equal, &done);
3618
3619 StackCheckStub stub;
3620 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3621 __ bind(&done);
3622}
3623
3624
3625void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
3626 // This is a pseudo-instruction that ensures that the environment here is
3627 // properly registered for deoptimization and records the assembler's PC
3628 // offset.
3629 LEnvironment* environment = instr->environment();
3630 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
3631 instr->SpilledDoubleRegisterArray());
3632
3633 // If the environment were already registered, we would have no way of
3634 // backpatching it with the spill slot operands.
3635 ASSERT(!environment->HasBeenRegistered());
3636 RegisterEnvironmentForDeoptimization(environment);
3637 ASSERT(osr_pc_offset_ == -1);
3638 osr_pc_offset_ = masm()->pc_offset();
3639}
3640
3641
3642#undef __
3643
3644} } // namespace v8::internal
3645
3646#endif // V8_TARGET_ARCH_IA32