blob: 8bcce3388457d9e6d8837c59914a3a47dbd55f38 [file] [log] [blame]
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Ben Murdochb8e0da22011-05-16 14:20:40 +010028#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_IA32)
31
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "ia32/lithium-codegen-ia32.h"
33#include "code-stubs.h"
Steve Block44f0eee2011-05-26 01:26:41 +010034#include "deoptimizer.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010035#include "stub-cache.h"
36
37namespace v8 {
38namespace internal {
39
40
Steve Block1e0659c2011-05-24 12:43:12 +010041// When invoking builtins, we need to record the safepoint in the middle of
42// the invoke instruction sequence generated by the macro assembler.
Ben Murdochb0fe1622011-05-05 13:52:32 +010043class SafepointGenerator : public PostCallGenerator {
44 public:
45 SafepointGenerator(LCodeGen* codegen,
46 LPointerMap* pointers,
Steve Block44f0eee2011-05-26 01:26:41 +010047 int deoptimization_index)
Ben Murdochb0fe1622011-05-05 13:52:32 +010048 : codegen_(codegen),
49 pointers_(pointers),
Steve Block44f0eee2011-05-26 01:26:41 +010050 deoptimization_index_(deoptimization_index) {}
Ben Murdochb0fe1622011-05-05 13:52:32 +010051 virtual ~SafepointGenerator() { }
52
53 virtual void Generate() {
54 codegen_->RecordSafepoint(pointers_, deoptimization_index_);
55 }
56
57 private:
58 LCodeGen* codegen_;
59 LPointerMap* pointers_;
60 int deoptimization_index_;
61};
62
63
64#define __ masm()->
65
66bool LCodeGen::GenerateCode() {
67 HPhase phase("Code generation", chunk());
68 ASSERT(is_unused());
69 status_ = GENERATING;
70 CpuFeatures::Scope scope(SSE2);
71 return GeneratePrologue() &&
72 GenerateBody() &&
73 GenerateDeferredCode() &&
74 GenerateSafepointTable();
75}
76
77
78void LCodeGen::FinishCode(Handle<Code> code) {
79 ASSERT(is_done());
Steve Block053d10c2011-06-13 19:13:29 +010080 code->set_stack_slots(StackSlotCount());
Steve Block1e0659c2011-05-24 12:43:12 +010081 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
Ben Murdochb0fe1622011-05-05 13:52:32 +010082 PopulateDeoptimizationData(code);
Steve Block44f0eee2011-05-26 01:26:41 +010083 Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
Ben Murdochb0fe1622011-05-05 13:52:32 +010084}
85
86
87void LCodeGen::Abort(const char* format, ...) {
88 if (FLAG_trace_bailout) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +010089 SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
90 PrintF("Aborting LCodeGen in @\"%s\": ", *name);
Ben Murdochb0fe1622011-05-05 13:52:32 +010091 va_list arguments;
92 va_start(arguments, format);
93 OS::VPrint(format, arguments);
94 va_end(arguments);
95 PrintF("\n");
96 }
97 status_ = ABORTED;
98}
99
100
101void LCodeGen::Comment(const char* format, ...) {
102 if (!FLAG_code_comments) return;
103 char buffer[4 * KB];
104 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
105 va_list arguments;
106 va_start(arguments, format);
107 builder.AddFormattedList(format, arguments);
108 va_end(arguments);
109
110 // Copy the string before recording it in the assembler to avoid
111 // issues when the stack allocated buffer goes out of scope.
112 size_t length = builder.position();
113 Vector<char> copy = Vector<char>::New(length + 1);
114 memcpy(copy.start(), builder.Finalize(), copy.length());
115 masm()->RecordComment(copy.start());
116}
117
118
119bool LCodeGen::GeneratePrologue() {
120 ASSERT(is_generating());
121
122#ifdef DEBUG
123 if (strlen(FLAG_stop_at) > 0 &&
124 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
125 __ int3();
126 }
127#endif
128
129 __ push(ebp); // Caller's frame pointer.
130 __ mov(ebp, esp);
131 __ push(esi); // Callee's context.
132 __ push(edi); // Callee's JS function.
133
134 // Reserve space for the stack slots needed by the code.
Steve Block053d10c2011-06-13 19:13:29 +0100135 int slots = StackSlotCount();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100136 if (slots > 0) {
137 if (FLAG_debug_code) {
138 __ mov(Operand(eax), Immediate(slots));
139 Label loop;
140 __ bind(&loop);
141 __ push(Immediate(kSlotsZapValue));
142 __ dec(eax);
143 __ j(not_zero, &loop);
144 } else {
145 __ sub(Operand(esp), Immediate(slots * kPointerSize));
Ben Murdochb8e0da22011-05-16 14:20:40 +0100146#ifdef _MSC_VER
147 // On windows, you may not access the stack more than one page below
148 // the most recently mapped page. To make the allocated area randomly
149 // accessible, we write to each page in turn (the value is irrelevant).
150 const int kPageSize = 4 * KB;
151 for (int offset = slots * kPointerSize - kPageSize;
152 offset > 0;
153 offset -= kPageSize) {
154 __ mov(Operand(esp, offset), eax);
155 }
156#endif
Ben Murdochb0fe1622011-05-05 13:52:32 +0100157 }
158 }
159
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100160 // Possibly allocate a local context.
161 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
162 if (heap_slots > 0) {
163 Comment(";;; Allocate local context");
164 // Argument to NewContext is the function, which is still in edi.
165 __ push(edi);
166 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
167 FastNewContextStub stub(heap_slots);
168 __ CallStub(&stub);
169 } else {
170 __ CallRuntime(Runtime::kNewContext, 1);
171 }
172 RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
173 // Context is returned in both eax and esi. It replaces the context
174 // passed to us. It's saved in the stack and kept live in esi.
175 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
176
177 // Copy parameters into context if necessary.
178 int num_parameters = scope()->num_parameters();
179 for (int i = 0; i < num_parameters; i++) {
180 Slot* slot = scope()->parameter(i)->AsSlot();
181 if (slot != NULL && slot->type() == Slot::CONTEXT) {
182 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
183 (num_parameters - 1 - i) * kPointerSize;
184 // Load parameter from stack.
185 __ mov(eax, Operand(ebp, parameter_offset));
186 // Store it in the context.
187 int context_offset = Context::SlotOffset(slot->index());
188 __ mov(Operand(esi, context_offset), eax);
189 // Update the write barrier. This clobbers all involved
190 // registers, so we have to use a third register to avoid
191 // clobbering esi.
192 __ mov(ecx, esi);
193 __ RecordWrite(ecx, context_offset, eax, ebx);
194 }
195 }
196 Comment(";;; End allocate local context");
197 }
198
Ben Murdochb0fe1622011-05-05 13:52:32 +0100199 // Trace the call.
200 if (FLAG_trace) {
Steve Block1e0659c2011-05-24 12:43:12 +0100201 // We have not executed any compiled code yet, so esi still holds the
202 // incoming context.
Ben Murdochb0fe1622011-05-05 13:52:32 +0100203 __ CallRuntime(Runtime::kTraceEnter, 0);
204 }
205 return !is_aborted();
206}
207
208
209bool LCodeGen::GenerateBody() {
210 ASSERT(is_generating());
211 bool emit_instructions = true;
212 for (current_instruction_ = 0;
213 !is_aborted() && current_instruction_ < instructions_->length();
214 current_instruction_++) {
215 LInstruction* instr = instructions_->at(current_instruction_);
216 if (instr->IsLabel()) {
217 LLabel* label = LLabel::cast(instr);
218 emit_instructions = !label->HasReplacement();
219 }
220
221 if (emit_instructions) {
222 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
223 instr->CompileToNative(this);
224 }
225 }
226 return !is_aborted();
227}
228
229
230LInstruction* LCodeGen::GetNextInstruction() {
231 if (current_instruction_ < instructions_->length() - 1) {
232 return instructions_->at(current_instruction_ + 1);
233 } else {
234 return NULL;
235 }
236}
237
238
239bool LCodeGen::GenerateDeferredCode() {
240 ASSERT(is_generating());
241 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
242 LDeferredCode* code = deferred_[i];
243 __ bind(code->entry());
244 code->Generate();
245 __ jmp(code->exit());
246 }
247
248 // Deferred code is the last part of the instruction sequence. Mark
249 // the generated code as done unless we bailed out.
250 if (!is_aborted()) status_ = DONE;
251 return !is_aborted();
252}
253
254
255bool LCodeGen::GenerateSafepointTable() {
256 ASSERT(is_done());
Steve Block053d10c2011-06-13 19:13:29 +0100257 safepoints_.Emit(masm(), StackSlotCount());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100258 return !is_aborted();
259}
260
261
262Register LCodeGen::ToRegister(int index) const {
263 return Register::FromAllocationIndex(index);
264}
265
266
267XMMRegister LCodeGen::ToDoubleRegister(int index) const {
268 return XMMRegister::FromAllocationIndex(index);
269}
270
271
272Register LCodeGen::ToRegister(LOperand* op) const {
273 ASSERT(op->IsRegister());
274 return ToRegister(op->index());
275}
276
277
278XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
279 ASSERT(op->IsDoubleRegister());
280 return ToDoubleRegister(op->index());
281}
282
283
284int LCodeGen::ToInteger32(LConstantOperand* op) const {
285 Handle<Object> value = chunk_->LookupLiteral(op);
286 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
287 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
288 value->Number());
289 return static_cast<int32_t>(value->Number());
290}
291
292
293Immediate LCodeGen::ToImmediate(LOperand* op) {
294 LConstantOperand* const_op = LConstantOperand::cast(op);
295 Handle<Object> literal = chunk_->LookupLiteral(const_op);
296 Representation r = chunk_->LookupLiteralRepresentation(const_op);
297 if (r.IsInteger32()) {
298 ASSERT(literal->IsNumber());
299 return Immediate(static_cast<int32_t>(literal->Number()));
300 } else if (r.IsDouble()) {
301 Abort("unsupported double immediate");
302 }
303 ASSERT(r.IsTagged());
304 return Immediate(literal);
305}
306
307
308Operand LCodeGen::ToOperand(LOperand* op) const {
309 if (op->IsRegister()) return Operand(ToRegister(op));
310 if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
311 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
312 int index = op->index();
313 if (index >= 0) {
314 // Local or spill slot. Skip the frame pointer, function, and
315 // context in the fixed part of the frame.
316 return Operand(ebp, -(index + 3) * kPointerSize);
317 } else {
318 // Incoming parameter. Skip the return address.
319 return Operand(ebp, -(index - 1) * kPointerSize);
320 }
321}
322
323
Ben Murdochb8e0da22011-05-16 14:20:40 +0100324Operand LCodeGen::HighOperand(LOperand* op) {
325 ASSERT(op->IsDoubleStackSlot());
326 int index = op->index();
327 int offset = (index >= 0) ? index + 3 : index - 1;
328 return Operand(ebp, -offset * kPointerSize);
329}
330
331
332void LCodeGen::WriteTranslation(LEnvironment* environment,
333 Translation* translation) {
334 if (environment == NULL) return;
335
336 // The translation includes one command per value in the environment.
337 int translation_size = environment->values()->length();
338 // The output frame height does not include the parameters.
339 int height = translation_size - environment->parameter_count();
340
341 WriteTranslation(environment->outer(), translation);
342 int closure_id = DefineDeoptimizationLiteral(environment->closure());
343 translation->BeginFrame(environment->ast_id(), closure_id, height);
344 for (int i = 0; i < translation_size; ++i) {
345 LOperand* value = environment->values()->at(i);
346 // spilled_registers_ and spilled_double_registers_ are either
347 // both NULL or both set.
348 if (environment->spilled_registers() != NULL && value != NULL) {
349 if (value->IsRegister() &&
350 environment->spilled_registers()[value->index()] != NULL) {
351 translation->MarkDuplicate();
352 AddToTranslation(translation,
353 environment->spilled_registers()[value->index()],
354 environment->HasTaggedValueAt(i));
355 } else if (
356 value->IsDoubleRegister() &&
357 environment->spilled_double_registers()[value->index()] != NULL) {
358 translation->MarkDuplicate();
359 AddToTranslation(
360 translation,
361 environment->spilled_double_registers()[value->index()],
362 false);
363 }
364 }
365
366 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
367 }
368}
369
370
Ben Murdochb0fe1622011-05-05 13:52:32 +0100371void LCodeGen::AddToTranslation(Translation* translation,
372 LOperand* op,
373 bool is_tagged) {
374 if (op == NULL) {
375 // TODO(twuerthinger): Introduce marker operands to indicate that this value
376 // is not present and must be reconstructed from the deoptimizer. Currently
377 // this is only used for the arguments object.
378 translation->StoreArgumentsObject();
379 } else if (op->IsStackSlot()) {
380 if (is_tagged) {
381 translation->StoreStackSlot(op->index());
382 } else {
383 translation->StoreInt32StackSlot(op->index());
384 }
385 } else if (op->IsDoubleStackSlot()) {
386 translation->StoreDoubleStackSlot(op->index());
387 } else if (op->IsArgument()) {
388 ASSERT(is_tagged);
Steve Block053d10c2011-06-13 19:13:29 +0100389 int src_index = StackSlotCount() + op->index();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100390 translation->StoreStackSlot(src_index);
391 } else if (op->IsRegister()) {
392 Register reg = ToRegister(op);
393 if (is_tagged) {
394 translation->StoreRegister(reg);
395 } else {
396 translation->StoreInt32Register(reg);
397 }
398 } else if (op->IsDoubleRegister()) {
399 XMMRegister reg = ToDoubleRegister(op);
400 translation->StoreDoubleRegister(reg);
401 } else if (op->IsConstantOperand()) {
402 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
403 int src_index = DefineDeoptimizationLiteral(literal);
404 translation->StoreLiteral(src_index);
405 } else {
406 UNREACHABLE();
407 }
408}
409
410
Ben Murdoch8b112d22011-06-08 16:22:53 +0100411void LCodeGen::CallCodeGeneric(Handle<Code> code,
412 RelocInfo::Mode mode,
413 LInstruction* instr,
414 ContextMode context_mode,
415 SafepointMode safepoint_mode) {
Steve Block1e0659c2011-05-24 12:43:12 +0100416 ASSERT(instr != NULL);
417 LPointerMap* pointers = instr->pointer_map();
418 RecordPosition(pointers->position());
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100419
Ben Murdoch8b112d22011-06-08 16:22:53 +0100420 if (context_mode == RESTORE_CONTEXT) {
Steve Block1e0659c2011-05-24 12:43:12 +0100421 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100422 }
Steve Block1e0659c2011-05-24 12:43:12 +0100423 __ call(code, mode);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100424
Ben Murdoch8b112d22011-06-08 16:22:53 +0100425 RegisterLazyDeoptimization(instr, safepoint_mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100426
427 // Signal that we don't inline smi code before these stubs in the
428 // optimizing code generator.
429 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
430 code->kind() == Code::COMPARE_IC) {
431 __ nop();
432 }
433}
434
435
Ben Murdoch8b112d22011-06-08 16:22:53 +0100436void LCodeGen::CallCode(Handle<Code> code,
437 RelocInfo::Mode mode,
438 LInstruction* instr,
439 ContextMode context_mode) {
440 CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT);
441}
442
443
Steve Block44f0eee2011-05-26 01:26:41 +0100444void LCodeGen::CallRuntime(const Runtime::Function* fun,
Steve Block1e0659c2011-05-24 12:43:12 +0100445 int argc,
446 LInstruction* instr,
Ben Murdoch8b112d22011-06-08 16:22:53 +0100447 ContextMode context_mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100448 ASSERT(instr != NULL);
Steve Block1e0659c2011-05-24 12:43:12 +0100449 ASSERT(instr->HasPointerMap());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100450 LPointerMap* pointers = instr->pointer_map();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100451 RecordPosition(pointers->position());
452
Ben Murdoch8b112d22011-06-08 16:22:53 +0100453 if (context_mode == RESTORE_CONTEXT) {
Steve Block1e0659c2011-05-24 12:43:12 +0100454 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100455 }
Steve Block1e0659c2011-05-24 12:43:12 +0100456 __ CallRuntime(fun, argc);
Steve Block44f0eee2011-05-26 01:26:41 +0100457
Ben Murdoch8b112d22011-06-08 16:22:53 +0100458 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100459}
460
461
Ben Murdoch8b112d22011-06-08 16:22:53 +0100462void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
463 int argc,
464 LInstruction* instr) {
465 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
466 __ CallRuntimeSaveDoubles(id);
467 RecordSafepointWithRegisters(
468 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
469}
470
471
472void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
473 SafepointMode safepoint_mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100474 // Create the environment to bailout to. If the call has side effects
475 // execution has to continue after the call otherwise execution can continue
476 // from a previous bailout point repeating the call.
477 LEnvironment* deoptimization_environment;
478 if (instr->HasDeoptimizationEnvironment()) {
479 deoptimization_environment = instr->deoptimization_environment();
480 } else {
481 deoptimization_environment = instr->environment();
482 }
483
484 RegisterEnvironmentForDeoptimization(deoptimization_environment);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100485 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
486 RecordSafepoint(instr->pointer_map(),
487 deoptimization_environment->deoptimization_index());
488 } else {
489 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
490 RecordSafepointWithRegisters(
491 instr->pointer_map(),
492 0,
493 deoptimization_environment->deoptimization_index());
494 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100495}
496
497
498void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
499 if (!environment->HasBeenRegistered()) {
500 // Physical stack frame layout:
501 // -x ............. -4 0 ..................................... y
502 // [incoming arguments] [spill slots] [pushed outgoing arguments]
503
504 // Layout of the environment:
505 // 0 ..................................................... size-1
506 // [parameters] [locals] [expression stack including arguments]
507
508 // Layout of the translation:
509 // 0 ........................................................ size - 1 + 4
510 // [expression stack including arguments] [locals] [4 words] [parameters]
511 // |>------------ translation_size ------------<|
512
513 int frame_count = 0;
514 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
515 ++frame_count;
516 }
517 Translation translation(&translations_, frame_count);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100518 WriteTranslation(environment, &translation);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100519 int deoptimization_index = deoptimizations_.length();
520 environment->Register(deoptimization_index, translation.index());
521 deoptimizations_.Add(environment);
522 }
523}
524
525
526void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
527 RegisterEnvironmentForDeoptimization(environment);
528 ASSERT(environment->HasBeenRegistered());
529 int id = environment->deoptimization_index();
530 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
531 ASSERT(entry != NULL);
532 if (entry == NULL) {
533 Abort("bailout was not prepared");
534 return;
535 }
536
537 if (FLAG_deopt_every_n_times != 0) {
538 Handle<SharedFunctionInfo> shared(info_->shared_info());
539 Label no_deopt;
540 __ pushfd();
541 __ push(eax);
542 __ push(ebx);
543 __ mov(ebx, shared);
544 __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
545 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
546 __ j(not_zero, &no_deopt);
547 if (FLAG_trap_on_deopt) __ int3();
548 __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
549 __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
550 __ pop(ebx);
551 __ pop(eax);
552 __ popfd();
553 __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
554
555 __ bind(&no_deopt);
556 __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
557 __ pop(ebx);
558 __ pop(eax);
559 __ popfd();
560 }
561
562 if (cc == no_condition) {
563 if (FLAG_trap_on_deopt) __ int3();
564 __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
565 } else {
566 if (FLAG_trap_on_deopt) {
567 NearLabel done;
568 __ j(NegateCondition(cc), &done);
569 __ int3();
570 __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
571 __ bind(&done);
572 } else {
573 __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
574 }
575 }
576}
577
578
579void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
580 int length = deoptimizations_.length();
581 if (length == 0) return;
582 ASSERT(FLAG_deopt);
583 Handle<DeoptimizationInputData> data =
Steve Block44f0eee2011-05-26 01:26:41 +0100584 factory()->NewDeoptimizationInputData(length, TENURED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100585
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100586 Handle<ByteArray> translations = translations_.CreateByteArray();
587 data->SetTranslationByteArray(*translations);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100588 data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
589
590 Handle<FixedArray> literals =
Steve Block44f0eee2011-05-26 01:26:41 +0100591 factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100592 for (int i = 0; i < deoptimization_literals_.length(); i++) {
593 literals->set(i, *deoptimization_literals_[i]);
594 }
595 data->SetLiteralArray(*literals);
596
597 data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
598 data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
599
600 // Populate the deoptimization entries.
601 for (int i = 0; i < length; i++) {
602 LEnvironment* env = deoptimizations_[i];
603 data->SetAstId(i, Smi::FromInt(env->ast_id()));
604 data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
605 data->SetArgumentsStackHeight(i,
606 Smi::FromInt(env->arguments_stack_height()));
607 }
608 code->set_deoptimization_data(*data);
609}
610
611
612int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
613 int result = deoptimization_literals_.length();
614 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
615 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
616 }
617 deoptimization_literals_.Add(literal);
618 return result;
619}
620
621
622void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
623 ASSERT(deoptimization_literals_.length() == 0);
624
625 const ZoneList<Handle<JSFunction> >* inlined_closures =
626 chunk()->inlined_closures();
627
628 for (int i = 0, length = inlined_closures->length();
629 i < length;
630 i++) {
631 DefineDeoptimizationLiteral(inlined_closures->at(i));
632 }
633
634 inlined_function_count_ = deoptimization_literals_.length();
635}
636
637
Steve Block1e0659c2011-05-24 12:43:12 +0100638void LCodeGen::RecordSafepoint(
639 LPointerMap* pointers,
640 Safepoint::Kind kind,
641 int arguments,
642 int deoptimization_index) {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100643 ASSERT(kind == expected_safepoint_kind_);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100644 const ZoneList<LOperand*>* operands = pointers->operands();
645 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
Steve Block1e0659c2011-05-24 12:43:12 +0100646 kind, arguments, deoptimization_index);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100647 for (int i = 0; i < operands->length(); i++) {
648 LOperand* pointer = operands->at(i);
649 if (pointer->IsStackSlot()) {
650 safepoint.DefinePointerSlot(pointer->index());
Steve Block1e0659c2011-05-24 12:43:12 +0100651 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
652 safepoint.DefinePointerRegister(ToRegister(pointer));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100653 }
654 }
655}
656
657
Steve Block1e0659c2011-05-24 12:43:12 +0100658void LCodeGen::RecordSafepoint(LPointerMap* pointers,
659 int deoptimization_index) {
660 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
661}
662
663
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100664void LCodeGen::RecordSafepoint(int deoptimization_index) {
665 LPointerMap empty_pointers(RelocInfo::kNoPosition);
666 RecordSafepoint(&empty_pointers, deoptimization_index);
667}
668
669
Ben Murdochb0fe1622011-05-05 13:52:32 +0100670void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
671 int arguments,
672 int deoptimization_index) {
Steve Block1e0659c2011-05-24 12:43:12 +0100673 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
674 deoptimization_index);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100675}
676
677
678void LCodeGen::RecordPosition(int position) {
679 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
680 masm()->positions_recorder()->RecordPosition(position);
681}
682
683
684void LCodeGen::DoLabel(LLabel* label) {
685 if (label->is_loop_header()) {
686 Comment(";;; B%d - LOOP entry", label->block_id());
687 } else {
688 Comment(";;; B%d", label->block_id());
689 }
690 __ bind(label->label());
691 current_block_ = label->block_id();
692 LCodeGen::DoGap(label);
693}
694
695
696void LCodeGen::DoParallelMove(LParallelMove* move) {
Ben Murdochb8e0da22011-05-16 14:20:40 +0100697 resolver_.Resolve(move);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100698}
699
700
701void LCodeGen::DoGap(LGap* gap) {
702 for (int i = LGap::FIRST_INNER_POSITION;
703 i <= LGap::LAST_INNER_POSITION;
704 i++) {
705 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
706 LParallelMove* move = gap->GetParallelMove(inner_pos);
707 if (move != NULL) DoParallelMove(move);
708 }
709
710 LInstruction* next = GetNextInstruction();
711 if (next != NULL && next->IsLazyBailout()) {
712 int pc = masm()->pc_offset();
713 safepoints_.SetPcAfterGap(pc);
714 }
715}
716
717
718void LCodeGen::DoParameter(LParameter* instr) {
719 // Nothing to do.
720}
721
722
723void LCodeGen::DoCallStub(LCallStub* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +0100724 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100725 ASSERT(ToRegister(instr->result()).is(eax));
726 switch (instr->hydrogen()->major_key()) {
727 case CodeStub::RegExpConstructResult: {
728 RegExpConstructResultStub stub;
Ben Murdoch8b112d22011-06-08 16:22:53 +0100729 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100730 break;
731 }
732 case CodeStub::RegExpExec: {
733 RegExpExecStub stub;
Ben Murdoch8b112d22011-06-08 16:22:53 +0100734 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100735 break;
736 }
737 case CodeStub::SubString: {
738 SubStringStub stub;
Ben Murdoch8b112d22011-06-08 16:22:53 +0100739 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100740 break;
741 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100742 case CodeStub::NumberToString: {
743 NumberToStringStub stub;
Ben Murdoch8b112d22011-06-08 16:22:53 +0100744 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100745 break;
746 }
747 case CodeStub::StringAdd: {
748 StringAddStub stub(NO_STRING_ADD_FLAGS);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100749 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100750 break;
751 }
752 case CodeStub::StringCompare: {
753 StringCompareStub stub;
Ben Murdoch8b112d22011-06-08 16:22:53 +0100754 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100755 break;
756 }
757 case CodeStub::TranscendentalCache: {
758 TranscendentalCacheStub stub(instr->transcendental_type(),
759 TranscendentalCacheStub::TAGGED);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100760 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100761 break;
762 }
763 default:
764 UNREACHABLE();
765 }
766}
767
768
769void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
770 // Nothing to do.
771}
772
773
774void LCodeGen::DoModI(LModI* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +0100775 if (instr->hydrogen()->HasPowerOf2Divisor()) {
776 Register dividend = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100777
Steve Block44f0eee2011-05-26 01:26:41 +0100778 int32_t divisor =
779 HConstant::cast(instr->hydrogen()->right())->Integer32Value();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100780
Steve Block44f0eee2011-05-26 01:26:41 +0100781 if (divisor < 0) divisor = -divisor;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100782
Steve Block44f0eee2011-05-26 01:26:41 +0100783 NearLabel positive_dividend, done;
784 __ test(dividend, Operand(dividend));
785 __ j(not_sign, &positive_dividend);
786 __ neg(dividend);
787 __ and_(dividend, divisor - 1);
788 __ neg(dividend);
789 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
790 __ j(not_zero, &done);
791 DeoptimizeIf(no_condition, instr->environment());
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +0100792 } else {
793 __ jmp(&done);
Steve Block44f0eee2011-05-26 01:26:41 +0100794 }
795 __ bind(&positive_dividend);
796 __ and_(dividend, divisor - 1);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100797 __ bind(&done);
798 } else {
Steve Block44f0eee2011-05-26 01:26:41 +0100799 LOperand* right = instr->InputAt(1);
800 ASSERT(ToRegister(instr->InputAt(0)).is(eax));
801 ASSERT(ToRegister(instr->result()).is(edx));
802
803 Register right_reg = ToRegister(right);
804 ASSERT(!right_reg.is(eax));
805 ASSERT(!right_reg.is(edx));
806
807 // Check for x % 0.
808 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
809 __ test(right_reg, ToOperand(right));
810 DeoptimizeIf(zero, instr->environment());
811 }
812
813 // Sign extend to edx.
814 __ cdq();
815
816 // Check for (0 % -x) that will produce negative zero.
817 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
818 NearLabel positive_left;
819 NearLabel done;
820 __ test(eax, Operand(eax));
821 __ j(not_sign, &positive_left);
822 __ idiv(right_reg);
823
824 // Test the remainder for 0, because then the result would be -0.
825 __ test(edx, Operand(edx));
826 __ j(not_zero, &done);
827
828 DeoptimizeIf(no_condition, instr->environment());
829 __ bind(&positive_left);
830 __ idiv(right_reg);
831 __ bind(&done);
832 } else {
833 __ idiv(right_reg);
834 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100835 }
836}
837
838
// Emits code for integer division. Uses the ia32 idiv instruction, which
// requires the dividend in eax (sign-extended into edx) and produces the
// quotient in eax. Deoptimizes on division by zero, on a minus-zero result,
// on kMinInt / -1 overflow, and whenever the division has a remainder
// (i.e. only exact integer divisions survive in optimized code).
void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(eax));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  // The divisor must not alias eax/edx, which idiv clobbers.
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register left_reg = eax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    NearLabel left_not_zero;
    __ test(left_reg, Operand(left_reg));
    __ j(not_zero, &left_not_zero);
    // Dividend is zero: deopt if the divisor is negative (result would be -0).
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1), which overflows int32 and would fault in idiv.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    NearLabel left_not_min_int;
    __ cmp(left_reg, kMinInt);
    __ j(not_zero, &left_not_min_int);
    __ cmp(right_reg, -1);
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to edx.
  __ cdq();
  __ idiv(right_reg);

  // Deoptimize if remainder is not 0.
  __ test(edx, Operand(edx));
  DeoptimizeIf(not_zero, instr->environment());
}
883
884
// Emits code for integer multiplication, left := left * right. When the
// right operand is a constant, cheap equivalents (neg, xor, add, lea, shl)
// replace imul where profitable. Deoptimizes on overflow and, when required,
// on a minus-zero result; a temp register preserves the original left value
// so the sign of the non-zero operand can be tested afterwards.
void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Save the pre-multiplication left value for the minus-zero check below.
    __ mov(ToRegister(instr->TempAt(0)), left);
  }

  if (right->IsConstantOperand()) {
    // Try strength reductions on the multiplication.
    // All replacement instructions are at most as long as the imul
    // and have better latency.
    int constant = ToInteger32(LConstantOperand::cast(right));
    if (constant == -1) {
      // neg sets OF on kMinInt, so the overflow deopt below still fires.
      __ neg(left);
    } else if (constant == 0) {
      __ xor_(left, Operand(left));
    } else if (constant == 2) {
      // add sets OF like imul would.
      __ add(left, Operand(left));
    } else if (!instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
      // If we know that the multiplication can't overflow, it's safe to
      // use instructions that don't set the overflow flag for the
      // multiplication.
      switch (constant) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ lea(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shl(left, 2);
          break;
        case 5:
          __ lea(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shl(left, 3);
          break;
        case 9:
          __ lea(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shl(left, 4);
          break;
        default:
          __ imul(left, left, constant);
          break;
      }
    } else {
      __ imul(left, left, constant);
    }
  } else {
    __ imul(left, ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    NearLabel done;
    __ test(left, Operand(left));
    __ j(not_zero, &done);
    if (right->IsConstantOperand()) {
      // A zero result with a non-positive constant operand may be -0.
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}
962
963
964void LCodeGen::DoBitI(LBitI* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +0100965 LOperand* left = instr->InputAt(0);
966 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100967 ASSERT(left->Equals(instr->result()));
968 ASSERT(left->IsRegister());
969
970 if (right->IsConstantOperand()) {
971 int right_operand = ToInteger32(LConstantOperand::cast(right));
972 switch (instr->op()) {
973 case Token::BIT_AND:
974 __ and_(ToRegister(left), right_operand);
975 break;
976 case Token::BIT_OR:
977 __ or_(ToRegister(left), right_operand);
978 break;
979 case Token::BIT_XOR:
980 __ xor_(ToRegister(left), right_operand);
981 break;
982 default:
983 UNREACHABLE();
984 break;
985 }
986 } else {
987 switch (instr->op()) {
988 case Token::BIT_AND:
989 __ and_(ToRegister(left), ToOperand(right));
990 break;
991 case Token::BIT_OR:
992 __ or_(ToRegister(left), ToOperand(right));
993 break;
994 case Token::BIT_XOR:
995 __ xor_(ToRegister(left), ToOperand(right));
996 break;
997 default:
998 UNREACHABLE();
999 break;
1000 }
1001 }
1002}
1003
1004
// Emits a shift (SAR/SHR/SHL), left := left op right. A register shift
// amount must live in ecx (ia32 cl-shift requirement); a constant amount
// is masked to 5 bits. Logical right shift can produce a value outside
// the int32 range (sign bit set), in which case it deoptimizes.
void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(ecx));

    switch (instr->op()) {
      case Token::SAR:
        __ sar_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shr_cl(ToRegister(left));
        if (instr->can_deopt()) {
          // Result with the sign bit set is not a valid int32.
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        }
        break;
      case Token::SHL:
        __ shl_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    // ia32 shifts use only the low 5 bits of the count.
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sar(ToRegister(left), shift_count);
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          // Shift by zero is a no-op, but a negative input still isn't a
          // representable uint32-as-int32 result.
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        } else {
          __ shr(ToRegister(left), shift_count);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shl(ToRegister(left), shift_count);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1059
1060
1061void LCodeGen::DoSubI(LSubI* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001062 LOperand* left = instr->InputAt(0);
1063 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001064 ASSERT(left->Equals(instr->result()));
1065
1066 if (right->IsConstantOperand()) {
1067 __ sub(ToOperand(left), ToImmediate(right));
1068 } else {
1069 __ sub(ToRegister(left), ToOperand(right));
1070 }
1071 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1072 DeoptimizeIf(overflow, instr->environment());
1073 }
1074}
1075
1076
1077void LCodeGen::DoConstantI(LConstantI* instr) {
1078 ASSERT(instr->result()->IsRegister());
Steve Block9fac8402011-05-12 15:51:54 +01001079 __ Set(ToRegister(instr->result()), Immediate(instr->value()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001080}
1081
1082
// Materializes a double constant into an XMM register. +0.0 is produced
// with a single xorpd; any other value is assembled from its 32-bit halves
// via a scratch GP register, using pinsrd when SSE4.1 is available and a
// movd/psllq/por sequence (clobbering xmm0) otherwise.
void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  // Use xor to produce +0.0 in a fast and compact way, but avoid to
  // do so if the constant is -0.0 (same double value compare-wise, but a
  // different bit pattern).
  if (BitCast<uint64_t, double>(v) == 0) {
    __ xorpd(res, res);
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    uint64_t int_val = BitCast<uint64_t, double>(v);
    int32_t lower = static_cast<int32_t>(int_val);
    int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatures::Scope scope(SSE4_1);
      if (lower != 0) {
        // Load the low word, then insert the high word as lane 1.
        __ Set(temp, Immediate(lower));
        __ movd(res, Operand(temp));
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      } else {
        // Low word is zero: clear the register, then insert the high word.
        __ xorpd(res, res);
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      }
    } else {
      // No SSE4.1: build the value as (upper << 32) | lower, using xmm0 as
      // scratch for the low word.
      __ Set(temp, Immediate(upper));
      __ movd(res, Operand(temp));
      __ psllq(res, 32);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(xmm0, Operand(temp));
        __ por(res, xmm0);
      }
    }
  }
}
1120
1121
1122void LCodeGen::DoConstantT(LConstantT* instr) {
1123 ASSERT(instr->result()->IsRegister());
Steve Block9fac8402011-05-12 15:51:54 +01001124 __ Set(ToRegister(instr->result()), Immediate(instr->value()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001125}
1126
1127
Steve Block9fac8402011-05-12 15:51:54 +01001128void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001129 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +01001130 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001131 __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
1132}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001133
Ben Murdochb0fe1622011-05-05 13:52:32 +01001134
Steve Block9fac8402011-05-12 15:51:54 +01001135void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1136 Register result = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +01001137 Register array = ToRegister(instr->InputAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01001138 __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001139}
1140
1141
Steve Block44f0eee2011-05-26 01:26:41 +01001142void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01001143 Register result = ToRegister(instr->result());
1144 Register array = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01001145 __ mov(result, FieldOperand(array, ExternalArray::kLengthOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01001146}
1147
1148
// Implements the [[PrimitiveValue]] unwrap: if the input is a JSValue
// wrapper object, replace it with its wrapped value; smis and any other
// object kind pass through unchanged. Input and result share a register.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));  // Scratch for the map load.
  ASSERT(input.is(result));
  NearLabel done;
  // If the object is a smi return the object.
  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, &done);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
  __ j(not_equal, &done);
  __ mov(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1166
1167
1168void LCodeGen::DoBitNotI(LBitNotI* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001169 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001170 ASSERT(input->Equals(instr->result()));
1171 __ not_(ToRegister(input));
1172}
1173
1174
// Emits a JavaScript throw: pushes the exception value and calls the
// Runtime::kThrow entry, which unwinds and never returns here.
void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToOperand(instr->InputAt(0)));
  CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT);

  // The runtime call does not return; trap in debug builds if it does.
  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}
1184
1185
1186void LCodeGen::DoAddI(LAddI* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001187 LOperand* left = instr->InputAt(0);
1188 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001189 ASSERT(left->Equals(instr->result()));
1190
1191 if (right->IsConstantOperand()) {
1192 __ add(ToOperand(left), ToImmediate(right));
1193 } else {
1194 __ add(ToRegister(left), ToOperand(right));
1195 }
1196
1197 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1198 DeoptimizeIf(overflow, instr->environment());
1199 }
1200}
1201
1202
// Emits a double-precision arithmetic operation on XMM registers.
// ADD/SUB/MUL/DIV operate in place on the left register (which is also the
// result). MOD is routed through a C library call with both operands passed
// on the stack; its x87 st(0) return value is spilled and reloaded into the
// (fixed) result XMM register.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->InputAt(0));
  XMMRegister right = ToDoubleRegister(instr->InputAt(1));
  XMMRegister result = ToDoubleRegister(instr->result());
  // Modulo uses a fixed result register.
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      break;
    case Token::MOD: {
      // Pass two doubles as arguments on the stack.
      __ PrepareCallCFunction(4, eax);
      __ movdbl(Operand(esp, 0 * kDoubleSize), left);
      __ movdbl(Operand(esp, 1 * kDoubleSize), right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          4);

      // Return value is in st(0) on ia32.
      // Store it into the (fixed) result register.
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ fstp_d(Operand(esp, 0));
      __ movdbl(result, Operand(esp, 0));
      __ add(Operand(esp), Immediate(kDoubleSize));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
1244
1245
// Emits a generic (tagged-operand) binary arithmetic operation by calling
// the TypeRecordingBinaryOpStub. Operands are fixed in edx/eax and the
// result comes back in eax, matching the stub's calling convention.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(edx));
  ASSERT(ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  __ nop();  // Signals no inlined code.
}
1255
1256
1257int LCodeGen::GetNextEmittedBlock(int block) {
1258 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1259 LLabel* label = chunk_->GetLabel(i);
1260 if (!label->HasReplacement()) return i;
1261 }
1262 return -1;
1263}
1264
1265
1266void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
1267 int next_block = GetNextEmittedBlock(current_block_);
1268 right_block = chunk_->LookupDestination(right_block);
1269 left_block = chunk_->LookupDestination(left_block);
1270
1271 if (right_block == left_block) {
1272 EmitGoto(left_block);
1273 } else if (left_block == next_block) {
1274 __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
1275 } else if (right_block == next_block) {
1276 __ j(cc, chunk_->GetAssemblyLabel(left_block));
1277 } else {
1278 __ j(cc, chunk_->GetAssemblyLabel(left_block));
1279 __ jmp(chunk_->GetAssemblyLabel(right_block));
1280 }
1281}
1282
1283
// Emits a branch on the truth value of the input. Int32 inputs branch on
// non-zero; double inputs branch on != +0.0 (NaN compares unordered, hence
// not_equal also takes NaN to the true block). Tagged inputs go through the
// full ToBoolean protocol: known booleans compare against true; otherwise
// undefined/false/0-smi are false, true/other smis are true, heap numbers
// are compared against zero on the x87 stack, and everything else falls
// back to the ToBooleanStub.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ test(reg, Operand(reg));
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorpd(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    if (instr->hydrogen()->type().IsBoolean()) {
      __ cmp(reg, factory()->true_value());
      EmitBranch(true_block, false_block, equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      __ cmp(reg, factory()->undefined_value());
      __ j(equal, false_label);
      __ cmp(reg, factory()->true_value());
      __ j(equal, true_label);
      __ cmp(reg, factory()->false_value());
      __ j(equal, false_label);
      // The smi 0 is false; any other smi is true.
      __ test(reg, Operand(reg));
      __ j(equal, false_label);
      __ test(reg, Immediate(kSmiTagMask));
      __ j(zero, true_label);

      // Test for double values. Zero is false.
      NearLabel call_stub;
      __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
             factory()->heap_number_map());
      __ j(not_equal, &call_stub);
      __ fldz();
      __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
      __ FCmp();
      __ j(zero, false_label);
      __ jmp(true_label);

      // The conversion stub doesn't cause garbage collections so it's
      // safe to not record a safepoint after the call.
      __ bind(&call_stub);
      ToBooleanStub stub;
      __ pushad();
      __ push(reg);
      __ CallStub(&stub);
      // Flags from this test survive popad (popad doesn't touch EFLAGS).
      __ test(eax, Operand(eax));
      __ popad();
      EmitBranch(true_block, false_block, not_zero);
    }
  }
}
1343
1344
1345void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
1346 block = chunk_->LookupDestination(block);
1347 int next_block = GetNextEmittedBlock(current_block_);
1348 if (block != next_block) {
1349 // Perform stack overflow check if this goto needs it before jumping.
1350 if (deferred_stack_check != NULL) {
1351 ExternalReference stack_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01001352 ExternalReference::address_of_stack_limit(isolate());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001353 __ cmp(esp, Operand::StaticVariable(stack_limit));
1354 __ j(above_equal, chunk_->GetAssemblyLabel(block));
1355 __ jmp(deferred_stack_check->entry());
1356 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1357 } else {
1358 __ jmp(chunk_->GetAssemblyLabel(block));
1359 }
1360 }
1361}
1362
1363
// Deferred (out-of-line) part of the goto stack check: saves registers via
// the safepoint mechanism and calls the runtime stack guard.
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
  PushSafepointRegistersScope scope(this);
  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
}
1368
// Emits an unconditional jump to the instruction's target block. If the
// goto carries a stack check (back edges), a deferred code object is
// allocated so the slow path lives out of line; EmitGoto wires it in.
void LCodeGen::DoGoto(LGoto* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LGoto* instr_;
  };

  DeferredStackCheck* deferred = NULL;
  if (instr->include_stack_check()) {
    deferred = new DeferredStackCheck(this, instr);
  }
  EmitGoto(instr->block_id(), deferred);
}
1385
1386
1387Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1388 Condition cond = no_condition;
1389 switch (op) {
1390 case Token::EQ:
1391 case Token::EQ_STRICT:
1392 cond = equal;
1393 break;
1394 case Token::LT:
1395 cond = is_unsigned ? below : less;
1396 break;
1397 case Token::GT:
1398 cond = is_unsigned ? above : greater;
1399 break;
1400 case Token::LTE:
1401 cond = is_unsigned ? below_equal : less_equal;
1402 break;
1403 case Token::GTE:
1404 cond = is_unsigned ? above_equal : greater_equal;
1405 break;
1406 case Token::IN:
1407 case Token::INSTANCEOF:
1408 default:
1409 UNREACHABLE();
1410 }
1411 return cond;
1412}
1413
1414
1415void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1416 if (right->IsConstantOperand()) {
1417 __ cmp(ToOperand(left), ToImmediate(right));
1418 } else {
1419 __ cmp(ToRegister(left), ToOperand(right));
1420 }
1421}
1422
1423
// Materializes the boolean result of an integer or double comparison.
// Double comparisons route NaN operands (parity flag set) straight to the
// false result, since any comparison involving NaN is false.
void LCodeGen::DoCmpID(LCmpID* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  LOperand* result = instr->result();

  NearLabel unordered;
  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the unordered case, which produces a false value.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, &unordered, not_taken);
  } else {
    EmitCmpI(left, right);
  }

  NearLabel done;
  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  // mov does not clobber flags, so the cc from the compare is still valid.
  __ mov(ToRegister(result), factory()->true_value());
  __ j(cc, &done);

  __ bind(&unordered);
  __ mov(ToRegister(result), factory()->false_value());
  __ bind(&done);
}
1448
1449
// Branches on the result of an integer or double comparison. Double
// comparisons with a NaN operand (parity flag set) go directly to the
// false block, since any comparison involving NaN is false.
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  int true_block = chunk_->LookupDestination(instr->true_block_id());

  if (instr->is_double()) {
    // Don't base result on EFLAGS when a NaN is involved. Instead
    // jump to the false block.
    __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
    __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
  } else {
    EmitCmpI(left, right);
  }

  Condition cc = TokenToCondition(instr->op(), instr->is_double());
  EmitBranch(true_block, false_block, cc);
}
1468
1469
1470void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001471 Register left = ToRegister(instr->InputAt(0));
1472 Register right = ToRegister(instr->InputAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001473 Register result = ToRegister(instr->result());
1474
1475 __ cmp(left, Operand(right));
Steve Block44f0eee2011-05-26 01:26:41 +01001476 __ mov(result, factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001477 NearLabel done;
1478 __ j(equal, &done);
Steve Block44f0eee2011-05-26 01:26:41 +01001479 __ mov(result, factory()->false_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001480 __ bind(&done);
1481}
1482
1483
1484void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001485 Register left = ToRegister(instr->InputAt(0));
1486 Register right = ToRegister(instr->InputAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001487 int false_block = chunk_->LookupDestination(instr->false_block_id());
1488 int true_block = chunk_->LookupDestination(instr->true_block_id());
1489
1490 __ cmp(left, Operand(right));
1491 EmitBranch(true_block, false_block, equal);
1492}
1493
1494
// Materializes the boolean result of a null comparison. In strict mode
// only the null value itself matches; otherwise (== null) also matches
// undefined and undetectable objects, per JS abstract equality.
void LCodeGen::DoIsNull(LIsNull* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Materialize false.

  __ cmp(reg, factory()->null_value());
  if (instr->is_strict()) {
    // mov preserves flags; the compare above is still live at the jump.
    __ mov(result, factory()->true_value());
    NearLabel done;
    __ j(equal, &done);
    __ mov(result, factory()->false_value());
    __ bind(&done);
  } else {
    NearLabel true_value, false_value, done;
    __ j(equal, &true_value);
    __ cmp(reg, factory()->undefined_value());
    __ j(equal, &true_value);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, &false_value);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    // Result doubles as scratch; it is overwritten on both exits below.
    Register scratch = result;
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &true_value);
    __ bind(&false_value);
    __ mov(result, factory()->false_value());
    __ jmp(&done);
    __ bind(&true_value);
    __ mov(result, factory()->true_value());
    __ bind(&done);
  }
}
1531
1532
// Branches on a null comparison. In strict mode only the null value itself
// matches; otherwise (== null) also matches undefined and undetectable
// objects, per JS abstract equality.
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
  Register reg = ToRegister(instr->InputAt(0));

  // TODO(fsc): If the expression is known to be a smi, then it's
  // definitely not null. Jump to the false block.

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  __ cmp(reg, factory()->null_value());
  if (instr->is_strict()) {
    EmitBranch(true_block, false_block, equal);
  } else {
    Label* true_label = chunk_->GetAssemblyLabel(true_block);
    Label* false_label = chunk_->GetAssemblyLabel(false_block);
    __ j(equal, true_label);
    __ cmp(reg, factory()->undefined_value());
    __ j(equal, true_label);
    __ test(reg, Immediate(kSmiTagMask));
    __ j(zero, false_label);
    // Check for undetectable objects by looking in the bit field in
    // the map. The object has already been smi checked.
    Register scratch = ToRegister(instr->TempAt(0));
    __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
    __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
    __ test(scratch, Immediate(1 << Map::kIsUndetectable));
    EmitBranch(true_block, false_block, not_zero);
  }
}
1562
1563
// Emits the "is object" test used by IsObject and its branch form: smis and
// undetectable objects are not objects, null is, and otherwise the instance
// type must fall in [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]. Jumps to
// the supplied labels for the decided cases and returns the condition that
// is true (on the final cmp's flags) when the input is an object.
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Register temp2,
                                 Label* is_not_object,
                                 Label* is_object) {
  ASSERT(!input.is(temp1));
  ASSERT(!input.is(temp2));
  ASSERT(!temp1.is(temp2));

  __ test(input, Immediate(kSmiTagMask));
  __ j(equal, is_not_object);

  __ cmp(input, isolate()->factory()->null_value());
  __ j(equal, is_object);

  __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
  __ test(temp2, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, is_not_object);

  __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
  __ j(below, is_not_object);
  __ cmp(temp2, LAST_JS_OBJECT_TYPE);
  return below_equal;
}
1591
1592
// Materializes the boolean result of the "is object" test. The result
// register is also used as the first scratch for EmitIsObject; it is
// overwritten with the boolean on both exits.
void LCodeGen::DoIsObject(LIsObject* instr) {
  Register reg = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register temp = ToRegister(instr->TempAt(0));
  Label is_false, is_true, done;

  Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
  __ j(true_cond, &is_true);

  __ bind(&is_false);
  __ mov(result, factory()->false_value());
  __ jmp(&done);

  __ bind(&is_true);
  __ mov(result, factory()->true_value());

  __ bind(&done);
}
1611
1612
1613void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001614 Register reg = ToRegister(instr->InputAt(0));
1615 Register temp = ToRegister(instr->TempAt(0));
1616 Register temp2 = ToRegister(instr->TempAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001617
1618 int true_block = chunk_->LookupDestination(instr->true_block_id());
1619 int false_block = chunk_->LookupDestination(instr->false_block_id());
1620 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1621 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1622
1623 Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);
1624
1625 EmitBranch(true_block, false_block, true_cond);
1626}
1627
1628
// Materializes IsSmi as a boolean: result := true_value if the input is a
// smi, false_value otherwise.
void LCodeGen::DoIsSmi(LIsSmi* instr) {
  Operand input = ToOperand(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  // A smi has its tag bit clear, so the mask test sets ZF exactly for smis.
  __ test(input, Immediate(kSmiTagMask));
  // Optimistically load true; the mov does not disturb the flags, so the
  // conditional jump below still sees the result of the test.
  __ mov(result, factory()->true_value());
  NearLabel done;
  __ j(zero, &done);
  __ mov(result, factory()->false_value());
  __ bind(&done);
}
1641
1642
1643void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001644 Operand input = ToOperand(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001645
1646 int true_block = chunk_->LookupDestination(instr->true_block_id());
1647 int false_block = chunk_->LookupDestination(instr->false_block_id());
1648
1649 __ test(input, Immediate(kSmiTagMask));
1650 EmitBranch(true_block, false_block, zero);
1651}
1652
1653
Ben Murdochb8e0da22011-05-16 14:20:40 +01001654static InstanceType TestType(HHasInstanceType* instr) {
1655 InstanceType from = instr->from();
1656 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001657 if (from == FIRST_TYPE) return to;
1658 ASSERT(from == to || to == LAST_TYPE);
1659 return from;
1660}
1661
1662
Ben Murdochb8e0da22011-05-16 14:20:40 +01001663static Condition BranchCondition(HHasInstanceType* instr) {
1664 InstanceType from = instr->from();
1665 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001666 if (from == to) return equal;
1667 if (to == LAST_TYPE) return above_equal;
1668 if (from == FIRST_TYPE) return below_equal;
1669 UNREACHABLE();
1670 return equal;
1671}
1672
1673
// Materializes the instance-type interval check as a boolean in 'result'.
void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  // A smi never has the requested instance type.
  __ test(input, Immediate(kSmiTagMask));
  NearLabel done, is_false;
  __ j(zero, &is_false);
  // 'result' is used as the map scratch register here; it is overwritten
  // with the boolean answer below either way.
  __ CmpObjectType(input, TestType(instr->hydrogen()), result);
  __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
  __ mov(result, factory()->true_value());
  __ jmp(&done);
  __ bind(&is_false);
  __ mov(result, factory()->false_value());
  __ bind(&done);
}
1690
1691
1692void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001693 Register input = ToRegister(instr->InputAt(0));
1694 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001695
1696 int true_block = chunk_->LookupDestination(instr->true_block_id());
1697 int false_block = chunk_->LookupDestination(instr->false_block_id());
1698
1699 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1700
1701 __ test(input, Immediate(kSmiTagMask));
1702 __ j(zero, false_label);
1703
Ben Murdochb8e0da22011-05-16 14:20:40 +01001704 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1705 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001706}
1707
1708
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001709void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1710 Register input = ToRegister(instr->InputAt(0));
1711 Register result = ToRegister(instr->result());
1712
1713 if (FLAG_debug_code) {
1714 __ AbortIfNotString(input);
1715 }
1716
1717 __ mov(result, FieldOperand(input, String::kHashFieldOffset));
1718 __ IndexFromHash(result, result);
1719}
1720
1721
// Materializes "string has a cached array index" as a boolean in 'result'.
void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  ASSERT(instr->hydrogen()->value()->representation().IsTagged());
  // Optimistically load true; the mask bits being clear (ZF set) means an
  // index is cached, so only the non-zero case overwrites the result.
  __ mov(result, factory()->true_value());
  __ test(FieldOperand(input, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  NearLabel done;
  __ j(zero, &done);
  __ mov(result, factory()->false_value());
  __ bind(&done);
}
1735
1736
1737void LCodeGen::DoHasCachedArrayIndexAndBranch(
1738 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001739 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001740
1741 int true_block = chunk_->LookupDestination(instr->true_block_id());
1742 int false_block = chunk_->LookupDestination(instr->false_block_id());
1743
1744 __ test(FieldOperand(input, String::kHashFieldOffset),
1745 Immediate(String::kContainsCachedArrayIndexMask));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001746 EmitBranch(true_block, false_block, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001747}
1748
1749
// Branches to a label or falls through with the answer in the z flag. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!temp.is(temp2));  // But input and temp2 may be the same register.
  // Smis are not objects and therefore have no class.
  __ test(input, Immediate(kSmiTagMask));
  __ j(zero, is_false);
  __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
  __ j(below, is_false);

  // Map is now in temp.
  // Functions have class 'Function'.
  __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
  if (class_name->IsEqualTo(CStrVector("Function"))) {
    __ j(equal, is_true);
  } else {
    __ j(equal, is_false);
  }

  // Check if the constructor in the map is a function.
  __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);

  // Objects with a non-function constructor have class 'Object'.
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ j(not_equal, is_true);
  } else {
    __ j(not_equal, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ mov(temp, FieldOperand(temp,
                            SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, class_name);
  // End with the answer in the z flag.
}
1805
1806
// Materializes the class-of test as a boolean. The input register doubles
// as the result register (asserted below).
void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  ASSERT(input.is(result));
  Register temp = ToRegister(instr->TempAt(0));
  Handle<String> class_name = instr->hydrogen()->class_name();
  NearLabel done;
  Label is_true, is_false;

  // Passing 'input' as temp2 is explicitly allowed by EmitClassOfTest
  // (only input and temp2 may alias); the boolean overwrites it anyway.
  EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);

  // EmitClassOfTest falls through with the answer in the z flag.
  __ j(not_equal, &is_false);

  __ bind(&is_true);
  __ mov(result, factory()->true_value());
  __ jmp(&done);

  __ bind(&is_false);
  __ mov(result, factory()->false_value());
  __ bind(&done);
}
1828
1829
// Branching form of the class-of test.
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  Register temp2 = ToRegister(instr->TempAt(1));
  if (input.is(temp)) {
    // Swap.
    // EmitClassOfTest requires !input.is(temp); input aliasing temp2 is OK.
    Register swapper = temp;
    temp = temp2;
    temp2 = swapper;
  }
  Handle<String> class_name = instr->hydrogen()->class_name();

  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);

  // EmitClassOfTest falls through with the answer in the z flag.
  EmitBranch(true_block, false_block, equal);
}
1852
1853
1854void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001855 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001856 int true_block = instr->true_block_id();
1857 int false_block = instr->false_block_id();
1858
1859 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1860 EmitBranch(true_block, false_block, equal);
1861}
1862
1863
// Generic instanceof: calls the InstanceofStub and converts its result
// (zero in eax means "is an instance") into a true/false object.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  // Object and function are in fixed registers defined by the stub.
  ASSERT(ToRegister(instr->context()).is(esi));
  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);

  NearLabel true_value, done;
  __ test(eax, Operand(eax));
  __ j(zero, &true_value);
  __ mov(ToRegister(instr->result()), factory()->false_value());
  __ jmp(&done);
  __ bind(&true_value);
  __ mov(ToRegister(instr->result()), factory()->true_value());
  __ bind(&done);
}
1879
1880
// Branching form of instanceof: zero in eax from the stub signals the
// object is an instance, so branch on the 'zero' condition.
void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
  __ test(eax, Operand(eax));
  EmitBranch(true_block, false_block, zero);
}
1891
1892
// instanceof against a known global function, using an inlined call-site
// cache: two hole values emitted below are patched (by the deferred stub
// call) to the last map/result pair, so repeated checks on the same map
// skip the stub entirely.
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
    }

    // Patch-site label; the deferred code needs it to compute the offset
    // passed to the InstanceofStub.
    Label* map_check() { return &map_check_; }

   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  // A Smi is not an instance of anything.
  __ test(object, Immediate(kSmiTagMask));
  __ j(zero, &false_result, not_taken);

  // This is the inlined call site instanceof cache. The two occurences of the
  // hole value will be patched to the last map/result pair generated by the
  // instanceof stub.
  NearLabel cache_miss;
  // NOTE: 'map' aliases 'temp' -- both come from TempAt(0). The alias is
  // harmless: the cached map is dead once the cache check below is done.
  Register map = ToRegister(instr->TempAt(0));
  __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  __ cmp(map, factory()->the_hole_value());  // Patched to cached map.
  __ j(not_equal, &cache_miss, not_taken);
  __ mov(eax, factory()->the_hole_value());  // Patched to either true or false.
  __ jmp(&done);

  // The inlined call site cache did not match. Check for null and string
  // before calling the deferred code.
  __ bind(&cache_miss);
  // Null is not an instance of anything.
  __ cmp(object, factory()->null_value());
  __ j(equal, &false_result);

  // String values are not instances of anything.
  Condition is_string = masm_->IsObjectStringType(object, temp, temp);
  __ j(is_string, &false_result);

  // Go to the deferred code.
  __ jmp(deferred->entry());

  __ bind(&false_result);
  __ mov(ToRegister(instr->result()), factory()->false_value());

  // Here result has either true or false. Deferred code also produces true or
  // false object.
  __ bind(deferred->exit());
  __ bind(&done);
}
1955
1956
// Slow path for DoInstanceOfKnownGlobal: calls the InstanceofStub with
// flags that make it perform the inline-cache patching at 'map_check' and
// return a true/false object directly in eax.
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                Label* map_check) {
  PushSafepointRegistersScope scope(this);

  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kArgsInRegisters);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kCallSiteInlineCheck);
  flags = static_cast<InstanceofStub::Flags>(
      flags | InstanceofStub::kReturnTrueFalseObject);
  InstanceofStub stub(flags);

  // Get the temp register reserved by the instruction. This needs to be a
  // register which is pushed last by PushSafepointRegisters as top of the
  // stack is used to pass the offset to the location of the map check to
  // the stub.
  Register temp = ToRegister(instr->TempAt(0));
  ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
  __ mov(InstanceofStub::right(), Immediate(instr->function()));
  // NOTE(review): kAdditionalDelta presumably accounts for the bytes of the
  // instructions emitted between this point and the actual stub call; it
  // must stay in sync if that code sequence changes -- confirm.
  static const int kAdditionalDelta = 16;
  int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
  __ mov(temp, Immediate(delta));
  // Publish the offset via the safepoint slot for 'temp' (top of stack).
  __ StoreToSafepointRegisterSlot(temp, temp);
  CallCodeGeneric(stub.GetCode(),
                  RelocInfo::CODE_TARGET,
                  instr,
                  RESTORE_CONTEXT,
                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  // Put the result value into the eax slot and restore all registers.
  __ StoreToSafepointRegisterSlot(eax, eax);
}
1989
1990
Ben Murdochb0fe1622011-05-05 13:52:32 +01001991static Condition ComputeCompareCondition(Token::Value op) {
1992 switch (op) {
1993 case Token::EQ_STRICT:
1994 case Token::EQ:
1995 return equal;
1996 case Token::LT:
1997 return less;
1998 case Token::GT:
1999 return greater;
2000 case Token::LTE:
2001 return less_equal;
2002 case Token::GTE:
2003 return greater_equal;
2004 default:
2005 UNREACHABLE();
2006 return no_condition;
2007 }
2008}
2009
2010
// Generic (tagged) comparison: calls the compare IC and materializes the
// outcome as a true/false object.
void LCodeGen::DoCmpT(LCmpT* instr) {
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);

  Condition condition = ComputeCompareCondition(op);
  // The compare stub expects the condition and input operands reversed for
  // GT and LTE (see DoCmpTAndBranch).
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  NearLabel true_value, done;
  __ test(eax, Operand(eax));
  __ j(condition, &true_value);
  __ mov(ToRegister(instr->result()), factory()->false_value());
  __ jmp(&done);
  __ bind(&true_value);
  __ mov(ToRegister(instr->result()), factory()->true_value());
  __ bind(&done);
}
2030
2031
// Branching form of the generic comparison: calls the compare IC and
// branches on the condition derived from the token.
void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);

  // The compare stub expects compare condition and the input operands
  // reversed for GT and LTE.
  Condition condition = ComputeCompareCondition(op);
  if (op == Token::GT || op == Token::LTE) {
    condition = ReverseCondition(condition);
  }
  __ test(eax, Operand(eax));
  EmitBranch(true_block, false_block, condition);
}
2049
2050
// Tears down the frame and returns, popping the parameters (plus one more
// slot -- presumably the receiver, confirm against the calling convention)
// from the caller's stack. The return value is in eax.
void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace) {
    // Preserve the return value on the stack and rely on the runtime call
    // to return the value in the same register. We're leaving the code
    // managed by the register allocator and tearing down the frame, it's
    // safe to write to the context register.
    __ push(eax);
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  __ mov(esp, ebp);
  __ pop(ebp);
  __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
}
2065
2066
// Loads the value of a global property cell. A deleted global property
// leaves the hole in its cell, in which case we deoptimize so the full
// lookup path can handle the access.
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
  if (instr->hydrogen()->check_hole_value()) {
    __ cmp(result, factory()->the_hole_value());
    DeoptimizeIf(equal, instr->environment());
  }
}
2075
2076
Ben Murdoch8b112d22011-06-08 16:22:53 +01002077void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2078 ASSERT(ToRegister(instr->context()).is(esi));
2079 ASSERT(ToRegister(instr->global_object()).is(eax));
2080 ASSERT(ToRegister(instr->result()).is(eax));
2081
2082 __ mov(ecx, instr->name());
2083 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2084 RelocInfo::CODE_TARGET_CONTEXT;
2085 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2086 CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
2087}
2088
2089
// Stores directly into a global property cell, deoptimizing first if the
// cell may hold the hole (i.e. the property may have been deleted).
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->InputAt(0));
  Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted. We deoptimize in that case.
  if (instr->hydrogen()->check_hole_value()) {
    __ cmp(cell_operand, factory()->the_hole_value());
    DeoptimizeIf(equal, instr->environment());
  }

  // Store the value.
  __ mov(cell_operand, value);
}
2106
2107
// Generic global store through the store IC. Register contract asserted
// below: global object in edx, value in eax, name in ecx.
void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->global_object()).is(edx));
  ASSERT(ToRegister(instr->value()).is(eax));

  __ mov(ecx, instr->name());
  // Pick the strict-mode variant of the IC when the instruction requires it.
  Handle<Code> ic = instr->strict_mode()
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
}
2119
2120
Ben Murdochb8e0da22011-05-16 14:20:40 +01002121void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002122 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002123 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002124 __ mov(result, ContextOperand(context, instr->slot_index()));
2125}
2126
2127
2128void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2129 Register context = ToRegister(instr->context());
2130 Register value = ToRegister(instr->value());
2131 __ mov(ContextOperand(context, instr->slot_index()), value);
2132 if (instr->needs_write_barrier()) {
2133 Register temp = ToRegister(instr->TempAt(0));
2134 int offset = Context::SlotOffset(instr->slot_index());
2135 __ RecordWrite(context, offset, value, temp);
2136 }
Ben Murdochb8e0da22011-05-16 14:20:40 +01002137}
2138
2139
Ben Murdochb0fe1622011-05-05 13:52:32 +01002140void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01002141 Register object = ToRegister(instr->object());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002142 Register result = ToRegister(instr->result());
2143 if (instr->hydrogen()->is_in_object()) {
2144 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
2145 } else {
2146 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2147 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
2148 }
2149}
2150
2151
// Emits a monomorphic field load for a property that is known (from 'type',
// the receiver's map) to be stored as a FIELD named 'name'. 'result' may
// alias 'object' only after the first mov, as written below.
void LCodeGen::EmitLoadField(Register result,
                             Register object,
                             Handle<Map> type,
                             Handle<String> name) {
  LookupResult lookup;
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
  int index = lookup.GetLocalFieldIndexFromMap(*type);
  int offset = index * kPointerSize;
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    __ mov(result, FieldOperand(object, offset + type->instance_size()));
  } else {
    // Non-negative property indices are in the properties array.
    __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
    __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
  }
}
2171
2172
// Polymorphic named load: compares the receiver's map against each expected
// map in turn, inlining the field load on a match. Unmatched maps either
// fall back to the generic load IC or deoptimize, depending on whether the
// hydrogen instruction demands a generic path.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());

  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No maps were recorded; only the generic path is possible.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(ecx, name);
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  } else {
    NearLabel done;
    // All but the last map get an explicit compare-and-load.
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      NearLabel next;
      __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
      __ j(not_equal, &next);
      EmitLoadField(result, object, map, name);
      __ jmp(&done);
      __ bind(&next);
    }
    // The last map's mismatch path is either the generic IC or a deopt.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
    if (instr->hydrogen()->need_generic()) {
      NearLabel generic;
      __ j(not_equal, &generic);
      EmitLoadField(result, object, map, name);
      __ jmp(&done);
      __ bind(&generic);
      __ mov(ecx, name);
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
    } else {
      DeoptimizeIf(not_equal, instr->environment());
      EmitLoadField(result, object, map, name);
    }
    __ bind(&done);
  }
}
2213
2214
// Generic named load through the load IC. Register contract asserted
// below: receiver in eax, name in ecx, result in eax.
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->object()).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  __ mov(ecx, instr->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
}
2224
2225
// Loads the prototype that instances of 'function' would receive.
// Deoptimizes when the input is not a function or has no prototype.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  // Check that the function really is a function.
  __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
  DeoptimizeIf(not_equal, instr->environment());

  // Check whether the function has an instance prototype.
  NearLabel non_instance;
  __ test_b(FieldOperand(result, Map::kBitFieldOffset),
            1 << Map::kHasNonInstancePrototype);
  __ j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  __ mov(result,
         FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
  DeoptimizeIf(equal, instr->environment());

  // If the function does not have an initial map, we're done.
  NearLabel done;
  __ CmpObjectType(result, MAP_TYPE, temp);
  __ j(not_equal, &done);

  // Get the prototype from the initial map.
  __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
  __ jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in the function's map.
  __ bind(&non_instance);
  // 'result' still holds the function's map from the CmpObjectType above.
  __ mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2266
2267
// Loads the elements backing store of a JSObject. In debug builds, verifies
// the result is a fixed array, a copy-on-write fixed array, or an external
// array (checked via its instance-type range).
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    NearLabel done;
    __ cmp(FieldOperand(result, HeapObject::kMapOffset),
           Immediate(factory()->fixed_array_map()));
    __ j(equal, &done);
    __ cmp(FieldOperand(result, HeapObject::kMapOffset),
           Immediate(factory()->fixed_cow_array_map()));
    __ j(equal, &done);
    // Pick a scratch register guaranteed distinct from 'result' and
    // preserve its value around the range check.
    Register temp((result.is(eax)) ? ebx : eax);
    __ push(temp);
    __ mov(temp, FieldOperand(result, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    // Unsigned range check: (type - FIRST_EXTERNAL_ARRAY_TYPE) < count.
    __ sub(Operand(temp), Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
    __ cmp(Operand(temp), Immediate(kExternalArrayTypeCount));
    __ pop(temp);
    __ Check(below, "Check for fast elements or pixel array failed.");
    __ bind(&done);
  }
}
2291
2292
Steve Block44f0eee2011-05-26 01:26:41 +01002293void LCodeGen::DoLoadExternalArrayPointer(
2294 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002295 Register result = ToRegister(instr->result());
2296 Register input = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002297 __ mov(result, FieldOperand(input,
2298 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002299}
2300
2301
// Loads an argument from the arguments area, addressed from the end:
// the element at 'length - index' from the frame pointer side.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Operand index = ToOperand(instr->index());
  Register result = ToRegister(instr->result());

  // 'length' is clobbered: afterwards it holds length - index.
  __ sub(length, index);
  // Unsigned below_equal also catches a negative index (huge unsigned).
  DeoptimizeIf(below_equal, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them add one more.
  __ mov(result, Operand(arguments, length, times_4, kPointerSize));
}
2315
2316
// Loads an element from a fast-elements fixed array, deoptimizing if the
// slot holds the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register elements = ToRegister(instr->elements());
  Register key = ToRegister(instr->key());
  Register result = ToRegister(instr->result());
  // The load below overwrites 'elements' with the loaded value.
  ASSERT(result.is(elements));

  // Load the result.
  __ mov(result, FieldOperand(elements,
                              key,
                              times_pointer_size,
                              FixedArray::kHeaderSize));

  // Check for the hole value.
  __ cmp(result, factory()->the_hole_value());
  DeoptimizeIf(equal, instr->environment());
}
2333
2334
Steve Block44f0eee2011-05-26 01:26:41 +01002335void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2336 LLoadKeyedSpecializedArrayElement* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002337 Register external_pointer = ToRegister(instr->external_pointer());
Steve Block1e0659c2011-05-24 12:43:12 +01002338 Register key = ToRegister(instr->key());
Steve Block44f0eee2011-05-26 01:26:41 +01002339 ExternalArrayType array_type = instr->array_type();
2340 if (array_type == kExternalFloatArray) {
2341 XMMRegister result(ToDoubleRegister(instr->result()));
2342 __ movss(result, Operand(external_pointer, key, times_4, 0));
2343 __ cvtss2sd(result, result);
2344 } else {
2345 Register result(ToRegister(instr->result()));
2346 switch (array_type) {
2347 case kExternalByteArray:
2348 __ movsx_b(result, Operand(external_pointer, key, times_1, 0));
2349 break;
2350 case kExternalUnsignedByteArray:
2351 case kExternalPixelArray:
2352 __ movzx_b(result, Operand(external_pointer, key, times_1, 0));
2353 break;
2354 case kExternalShortArray:
2355 __ movsx_w(result, Operand(external_pointer, key, times_2, 0));
2356 break;
2357 case kExternalUnsignedShortArray:
2358 __ movzx_w(result, Operand(external_pointer, key, times_2, 0));
2359 break;
2360 case kExternalIntArray:
2361 __ mov(result, Operand(external_pointer, key, times_4, 0));
2362 break;
2363 case kExternalUnsignedIntArray:
2364 __ mov(result, Operand(external_pointer, key, times_4, 0));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002365 __ test(result, Operand(result));
Steve Block44f0eee2011-05-26 01:26:41 +01002366 // TODO(danno): we could be more clever here, perhaps having a special
2367 // version of the stub that detects if the overflow case actually
2368 // happens, and generate code that returns a double rather than int.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002369 DeoptimizeIf(negative, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01002370 break;
2371 case kExternalFloatArray:
2372 UNREACHABLE();
2373 break;
2374 }
2375 }
Steve Block1e0659c2011-05-24 12:43:12 +01002376}
2377
2378
Ben Murdochb0fe1622011-05-05 13:52:32 +01002379void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002380 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002381 ASSERT(ToRegister(instr->object()).is(edx));
2382 ASSERT(ToRegister(instr->key()).is(eax));
2383
Steve Block44f0eee2011-05-26 01:26:41 +01002384 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch8b112d22011-06-08 16:22:53 +01002385 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002386}
2387
2388
// Materializes the frame pointer from which this function's actual
// arguments can be addressed, looking through an arguments adaptor frame
// when one is present.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  // Check for arguments adapter frame: the caller frame is an adaptor
  // frame iff its context slot holds the ARGUMENTS_ADAPTOR marker smi.
  NearLabel done, adapted;
  __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(result),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adapted);

  // No arguments adaptor frame.
  __ mov(result, Operand(ebp));
  __ jmp(&done);

  // Arguments adaptor frame present.
  __ bind(&adapted);
  __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ bind(&done);
}
2412
2413
// Computes the number of actual arguments: the static parameter count
// when no adaptor frame is present, otherwise the untagged length stored
// in the arguments adaptor frame.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Operand elem = ToOperand(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  NearLabel done;

  // If no arguments adaptor frame the number of arguments is fixed.
  // (elem is the frame pointer produced by DoArgumentsElements; it equals
  // ebp exactly when no adaptor frame was found.)
  __ cmp(ebp, elem);
  __ mov(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done);

  // Arguments adaptor frame present. Get argument length from there.
  __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(result, Operand(result,
                         ArgumentsAdaptorFrameConstants::kLengthOffset));
  // The stored length is a smi; untag it.
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}
2434
2435
// Emits the core of Function.prototype.apply: normalizes the receiver,
// pushes up to kArgumentsLimit arguments from the elements backing store,
// and invokes the function. Deoptimizes on an unusable receiver or an
// argument count above the limit.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  Register scratch = ToRegister(instr->TempAt(0));
  ASSERT(receiver.is(eax));  // Used for parameter count.
  ASSERT(function.is(edi));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(eax));

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver.
  NearLabel global_object, receiver_ok;
  __ cmp(receiver, factory()->null_value());
  __ j(equal, &global_object);
  __ cmp(receiver, factory()->undefined_value());
  __ j(equal, &global_object);

  // The receiver should be a JS object: deoptimize on a smi or on an
  // instance type below FIRST_JS_OBJECT_TYPE.
  __ test(receiver, Immediate(kSmiTagMask));
  DeoptimizeIf(equal, instr->environment());
  __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch);
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok);

  __ bind(&global_object);
  // TODO(kmillikin): We have a hydrogen value for the global object. See
  // if it's better to use it than to explicitly fetch it from the context
  // here.
  __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
  __ bind(&receiver_ok);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, kArgumentsLimit);
  DeoptimizeIf(above, instr->environment());

  __ push(receiver);
  __ mov(receiver, length);  // receiver (eax) now carries the argument count.

  // Loop through the arguments pushing them onto the execution
  // stack.
  NearLabel invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ test(length, Operand(length));
  __ j(zero, &invoke);
  __ bind(&loop);
  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
  __ dec(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  v8::internal::ParameterCount actual(eax);
  __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
}
2502
2503
2504void LCodeGen::DoPushArgument(LPushArgument* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002505 LOperand* argument = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002506 if (argument->IsConstantOperand()) {
2507 __ push(ToImmediate(argument));
2508 } else {
2509 __ push(ToOperand(argument));
2510 }
2511}
2512
2513
Steve Block1e0659c2011-05-24 12:43:12 +01002514void LCodeGen::DoContext(LContext* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002515 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002516 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
2517}
2518
2519
2520void LCodeGen::DoOuterContext(LOuterContext* instr) {
2521 Register context = ToRegister(instr->context());
2522 Register result = ToRegister(instr->result());
2523 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2524 __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2525}
2526
2527
2528void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2529 Register context = ToRegister(instr->context());
2530 Register result = ToRegister(instr->result());
2531 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002532}
2533
2534
2535void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002536 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002537 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002538 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002539}
2540
2541
// Invokes a function known at compile time with |arity| arguments.
// Callers must have loaded the function object into edi beforehand.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr) {
  // Change context if needed: the callee's context differs from ours, or
  // this function allocates a context of its own (with scope / heap slots).
  bool change_context =
      (info()->closure()->context() != function->context()) ||
      scope()->contains_with() ||
      (scope()->num_heap_slots() > 0);
  if (change_context) {
    __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  } else {
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }

  // Set eax to arguments count if adaption is not needed. Assumes that eax
  // is available to write to at this point.
  if (!function->NeedsArgumentsAdaption()) {
    __ mov(eax, arity);
  }

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  // Invoke function. A self-call gets a dedicated instruction sequence.
  if (*function == *info()->closure()) {
    __ CallSelf();
  } else {
    __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
  }

  // Setup deoptimization.
  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}
2575
2576
2577void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2578 ASSERT(ToRegister(instr->result()).is(eax));
2579 __ mov(edi, instr->function());
2580 CallKnownFunction(instr->function(), instr->arity(), instr);
2581}
2582
2583
// Deferred (slow) path of Math.abs for a tagged input: verifies the input
// is a heap number, and for a negative value allocates a fresh heap number
// holding the same bits with the sign bit cleared.
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->InputAt(0));
  // Deoptimize unless the input is a heap number.
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  DeoptimizeIf(not_equal, instr->environment());

  Label done;
  // Pick two scratch registers distinct from the input register.
  Register tmp = input_reg.is(eax) ? ecx : eax;
  Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  Label negative;
  __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ test(tmp, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative);
  __ jmp(&done);

  __ bind(&negative);

  Label allocated, slow;
  __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
  __ jmp(&allocated);

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);

  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(eax)) __ mov(tmp, eax);

  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  // Copy exponent word with the sign bit masked off, then the mantissa.
  __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  __ and_(tmp2, ~HeapNumber::kSignMask);
  __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
  __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
  __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
  // Publish the new heap number as the instruction's result.
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

  __ bind(&done);
}
2634
2635
Steve Block1e0659c2011-05-24 12:43:12 +01002636void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2637 Register input_reg = ToRegister(instr->InputAt(0));
2638 __ test(input_reg, Operand(input_reg));
2639 Label is_positive;
2640 __ j(not_sign, &is_positive);
2641 __ neg(input_reg);
2642 __ test(input_reg, Operand(input_reg));
2643 DeoptimizeIf(negative, instr->environment());
2644 __ bind(&is_positive);
2645}
2646
2647
// Emits Math.abs. Doubles are handled inline with SSE bit tricks, int32s
// via EmitIntegerMathAbs, and tagged values through a smi fast path with
// a deferred slow path for heap numbers.
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->InputAt(0)->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
    // scratch = -input; ANDing x with -x keeps all bits (they are equal
    // except the sign) and clears the sign bit, yielding |x|.
    __ pxor(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ pand(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->InputAt(0));
    // Smi check. Non-smis take the deferred heap-number path.
    __ test(input_reg, Immediate(kSmiTagMask));
    __ j(not_zero, deferred->entry());
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2684
2685
// Emits Math.floor via truncating conversion. Negative inputs (and NaN,
// which sets the carry flag on ucomisd) deoptimize; when a -0 result must
// be distinguished, zero inputs deoptimize as well since ucomisd cannot
// tell -0 from +0.
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
  __ ucomisd(input_reg, xmm_scratch);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(below_equal, instr->environment());
  } else {
    DeoptimizeIf(below, instr->environment());
  }

  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, Operand(input_reg));

  // Overflow is signalled with minint.
  __ cmp(output_reg, 0x80000000u);
  DeoptimizeIf(equal, instr->environment());
}
2706
2707
// Emits Math.round as floor(value + 0.5) using a truncating conversion.
// Deoptimizes when the adjusted value is negative (or NaN), and — when a
// -0 result matters — also when value + 0.5 equals 0.5 or less.
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));

  // xmm_scratch = 0.5
  ExternalReference one_half = ExternalReference::address_of_one_half();
  __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));

  // input = input + 0.5
  __ addsd(input_reg, xmm_scratch);

  // We need to return -0 for the input range [-0.5, 0[, otherwise
  // compute Math.floor(value + 0.5).
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below_equal, instr->environment());
  } else {
    // If we don't need to bailout on -0, we check only bailout
    // on negative inputs.
    __ xorpd(xmm_scratch, xmm_scratch);  // Zero the register.
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());
  }

  // Compute Math.floor(value + 0.5).
  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, Operand(input_reg));

  // Overflow is signalled with minint.
  __ cmp(output_reg, 0x80000000u);
  DeoptimizeIf(equal, instr->environment());
}
2741
2742
2743void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002744 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002745 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2746 __ sqrtsd(input_reg, input_reg);
2747}
2748
2749
// Emits Math.pow(x, 0.5) as sqrt(x). Adding +0.0 first normalizes a -0
// input to +0 before taking the square root.
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
  __ xorpd(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
}
2758
2759
// Emits Math.pow by calling out to C. The exponent may be a double, an
// int32, or a tagged value (smi or heap number — anything else
// deoptimizes). The C function returns its result on the x87 stack, from
// where it is moved into the fixed XMM result register.
void LCodeGen::DoPower(LPower* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  DoubleRegister result_reg = ToDoubleRegister(instr->result());
  Representation exponent_type = instr->hydrogen()->right()->representation();

  if (exponent_type.IsDouble()) {
    // It is safe to use ebx directly since the instruction is marked
    // as a call.
    __ PrepareCallCFunction(4, ebx);
    __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
    __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     4);
  } else if (exponent_type.IsInteger32()) {
    // It is safe to use ebx directly since the instruction is marked
    // as a call.
    ASSERT(!ToRegister(right).is(ebx));
    __ PrepareCallCFunction(4, ebx);
    __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
    __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
    __ CallCFunction(ExternalReference::power_double_int_function(isolate()),
                     4);
  } else {
    ASSERT(exponent_type.IsTagged());
    CpuFeatures::Scope scope(SSE2);
    Register right_reg = ToRegister(right);

    // Convert the exponent to a double: untag a smi directly, or load the
    // value of a heap number. Deoptimize on any other object.
    Label non_smi, call;
    __ test(right_reg, Immediate(kSmiTagMask));
    __ j(not_zero, &non_smi);
    __ SmiUntag(right_reg);
    __ cvtsi2sd(result_reg, Operand(right_reg));
    __ jmp(&call);

    __ bind(&non_smi);
    // It is safe to use ebx directly since the instruction is marked
    // as a call.
    ASSERT(!right_reg.is(ebx));
    __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , ebx);
    DeoptimizeIf(not_equal, instr->environment());
    __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));

    __ bind(&call);
    __ PrepareCallCFunction(4, ebx);
    __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
    __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     4);
  }

  // Return value is in st(0) on ia32.
  // Store it into the (fixed) result register.
  __ sub(Operand(esp), Immediate(kDoubleSize));
  __ fstp_d(Operand(esp, 0));
  __ movdbl(result_reg, Operand(esp, 0));
  __ add(Operand(esp), Immediate(kDoubleSize));
}
2818
2819
2820void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002821 ASSERT(instr->InputAt(0)->Equals(instr->result()));
2822 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2823 NearLabel positive, done, zero, negative;
2824 __ xorpd(xmm0, xmm0);
2825 __ ucomisd(input_reg, xmm0);
2826 __ j(above, &positive);
2827 __ j(equal, &zero);
2828 ExternalReference nan = ExternalReference::address_of_nan();
2829 __ movdbl(input_reg, Operand::StaticVariable(nan));
2830 __ jmp(&done);
2831 __ bind(&zero);
2832 __ push(Immediate(0xFFF00000));
2833 __ push(Immediate(0));
2834 __ movdbl(input_reg, Operand(esp, 0));
2835 __ add(Operand(esp), Immediate(kDoubleSize));
2836 __ jmp(&done);
2837 __ bind(&positive);
2838 __ fldln2();
2839 __ sub(Operand(esp), Immediate(kDoubleSize));
2840 __ movdbl(Operand(esp, 0), input_reg);
2841 __ fld_d(Operand(esp, 0));
2842 __ fyl2x();
2843 __ fstp_d(Operand(esp, 0));
2844 __ movdbl(input_reg, Operand(esp, 0));
2845 __ add(Operand(esp), Immediate(kDoubleSize));
2846 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002847}
2848
2849
// Emits Math.cos via the transcendental cache stub operating on untagged
// doubles; the result register is fixed to xmm1.
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
}
2856
2857
// Emits Math.sin via the transcendental cache stub operating on untagged
// doubles; the result register is fixed to xmm1.
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
}
2864
2865
// Dispatches a unary math instruction to the emitter for its specific
// operation kind.
void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathPowHalf:
      DoMathPowHalf(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;

    default:
      UNREACHABLE();
  }
}
2897
2898
2899void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002900 ASSERT(ToRegister(instr->context()).is(esi));
2901 ASSERT(ToRegister(instr->key()).is(ecx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002902 ASSERT(ToRegister(instr->result()).is(eax));
2903
2904 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01002905 Handle<Code> ic = isolate()->stub_cache()->
2906 ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002907 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002908}
2909
2910
2911void LCodeGen::DoCallNamed(LCallNamed* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002912 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002913 ASSERT(ToRegister(instr->result()).is(eax));
2914
2915 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01002916 Handle<Code> ic = isolate()->stub_cache()->
2917 ComputeCallInitialize(arity, NOT_IN_LOOP);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002918 __ mov(ecx, instr->name());
Ben Murdoch8b112d22011-06-08 16:22:53 +01002919 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002920}
2921
2922
2923void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002924 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002925 ASSERT(ToRegister(instr->result()).is(eax));
2926
2927 int arity = instr->arity();
2928 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002929 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002930 __ Drop(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002931}
2932
2933
2934void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002935 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002936 ASSERT(ToRegister(instr->result()).is(eax));
2937
2938 int arity = instr->arity();
Steve Block44f0eee2011-05-26 01:26:41 +01002939 Handle<Code> ic = isolate()->stub_cache()->
2940 ComputeCallInitialize(arity, NOT_IN_LOOP);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002941 __ mov(ecx, instr->name());
Ben Murdoch8b112d22011-06-08 16:22:53 +01002942 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002943}
2944
2945
2946void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2947 ASSERT(ToRegister(instr->result()).is(eax));
2948 __ mov(edi, instr->target());
2949 CallKnownFunction(instr->target(), instr->arity(), instr);
2950}
2951
2952
2953void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002954 ASSERT(ToRegister(instr->context()).is(esi));
2955 ASSERT(ToRegister(instr->constructor()).is(edi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002956 ASSERT(ToRegister(instr->result()).is(eax));
2957
Steve Block44f0eee2011-05-26 01:26:41 +01002958 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002959 __ Set(eax, Immediate(instr->arity()));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002960 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002961}
2962
2963
// Calls the instruction's runtime function with its arity.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT);
}
2967
2968
// Stores a named property at a known offset, either in-object or in the
// out-of-line properties array, updating the object's map first when the
// store carries a map transition and emitting a write barrier when needed.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  int offset = instr->offset();

  // Install the transition map before the store so the object's map
  // matches the field layout being written.
  if (!instr->transition().is_null()) {
    __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
  }

  // Do the store.
  if (instr->is_in_object()) {
    __ mov(FieldOperand(object, offset), value);
    if (instr->needs_write_barrier()) {
      Register temp = ToRegister(instr->TempAt(0));
      // Update the write barrier for the object for in-object properties.
      __ RecordWrite(object, offset, value, temp);
    }
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(temp, offset), value);
    if (instr->needs_write_barrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWrite(temp, offset, value, object);
    }
  }
}
2997
2998
2999void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003000 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003001 ASSERT(ToRegister(instr->object()).is(edx));
3002 ASSERT(ToRegister(instr->value()).is(eax));
3003
3004 __ mov(ecx, instr->name());
Ben Murdoch8b112d22011-06-08 16:22:53 +01003005 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003006 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3007 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch8b112d22011-06-08 16:22:53 +01003008 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003009}
3010
3011
3012void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
3013 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
3014 DeoptimizeIf(above_equal, instr->environment());
3015}
3016
3017
Steve Block44f0eee2011-05-26 01:26:41 +01003018void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3019 LStoreKeyedSpecializedArrayElement* instr) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003020 Register external_pointer = ToRegister(instr->external_pointer());
3021 Register key = ToRegister(instr->key());
Steve Block44f0eee2011-05-26 01:26:41 +01003022 ExternalArrayType array_type = instr->array_type();
3023 if (array_type == kExternalFloatArray) {
3024 __ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
3025 __ movss(Operand(external_pointer, key, times_4, 0), xmm0);
3026 } else {
3027 Register value = ToRegister(instr->value());
3028 switch (array_type) {
3029 case kExternalPixelArray: {
3030 // Clamp the value to [0..255].
3031 Register temp = ToRegister(instr->TempAt(0));
3032 // The dec_b below requires that the clamped value is in a byte
3033 // register. eax is an arbitrary choice to satisfy this requirement, we
3034 // hinted the register allocator to give us eax when building the
3035 // instruction.
3036 ASSERT(temp.is(eax));
3037 __ mov(temp, ToRegister(instr->value()));
3038 NearLabel done;
3039 __ test(temp, Immediate(0xFFFFFF00));
3040 __ j(zero, &done);
3041 __ setcc(negative, temp); // 1 if negative, 0 if positive.
3042 __ dec_b(temp); // 0 if negative, 255 if positive.
3043 __ bind(&done);
3044 __ mov_b(Operand(external_pointer, key, times_1, 0), temp);
3045 break;
3046 }
3047 case kExternalByteArray:
3048 case kExternalUnsignedByteArray:
3049 __ mov_b(Operand(external_pointer, key, times_1, 0), value);
3050 break;
3051 case kExternalShortArray:
3052 case kExternalUnsignedShortArray:
3053 __ mov_w(Operand(external_pointer, key, times_2, 0), value);
3054 break;
3055 case kExternalIntArray:
3056 case kExternalUnsignedIntArray:
3057 __ mov(Operand(external_pointer, key, times_4, 0), value);
3058 break;
3059 case kExternalFloatArray:
3060 UNREACHABLE();
3061 break;
3062 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003063 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003064}
3065
3066
// Stores a value into a fast-elements backing store (a FixedArray).  With
// a constant key the byte offset is folded into the operand; otherwise the
// key register scales the address.  Emits a write barrier when required.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // Constant-key stores never need the barrier here (asserted), so no
    // key register is required.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements, offset), value);
  } else {
    __ mov(FieldOperand(elements,
                        key,
                        times_pointer_size,
                        FixedArray::kHeaderSize),
           value);
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    __ lea(key,
           FieldOperand(elements,
                        key,
                        times_pointer_size,
                        FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
3097
3098
3099void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003100 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003101 ASSERT(ToRegister(instr->object()).is(edx));
3102 ASSERT(ToRegister(instr->key()).is(ecx));
3103 ASSERT(ToRegister(instr->value()).is(eax));
3104
Ben Murdoch8b112d22011-06-08 16:22:53 +01003105 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003106 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3107 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdoch8b112d22011-06-08 16:22:53 +01003108 CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003109}
3110
3111
// Loads the character code at a given index of a string into the result
// register.  Flat sequential strings (ASCII and two-byte) are handled
// inline; a cons string whose second part is the empty string is unwrapped
// one level.  Anything else falls back to the runtime via deferred code.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
   private:
    LStringCharCodeAt* instr_;
  };

  Register string = ToRegister(instr->string());
  Register index = no_reg;
  int const_index = -1;
  if (instr->index()->IsConstantOperand()) {
    const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
    if (!Smi::IsValid(const_index)) {
      // Guaranteed to be out of bounds because of the assert above.
      // So the bounds check that must dominate this instruction must
      // have deoptimized already.
      if (FLAG_debug_code) {
        __ Abort("StringCharCodeAt: out of bounds index.");
      }
      // No code needs to be generated.
      return;
    }
  } else {
    index = ToRegister(instr->index());
  }
  Register result = ToRegister(instr->result());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  NearLabel flat_string, ascii_string, done;

  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings.  Only cons strings have a chance of being
  // handled inline; other representations go to the runtime.
  __ test(result, Immediate(kIsConsStringMask));
  __ j(zero, deferred->entry());

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory()->empty_string()));
  __ j(not_equal, deferred->entry());
  // Get the first of the two strings and load its instance type.
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(not_zero, deferred->entry());

  // Check for ASCII or two-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  if (instr->index()->IsConstantOperand()) {
    __ movzx_w(result,
               FieldOperand(string,
                            SeqTwoByteString::kHeaderSize +
                            (kUC16Size * const_index)));
  } else {
    __ movzx_w(result, FieldOperand(string,
                                    index,
                                    times_2,
                                    SeqTwoByteString::kHeaderSize));
  }
  __ jmp(&done);

  // ASCII string.
  // Load the byte into the result register.
  __ bind(&ascii_string);
  if (instr->index()->IsConstantOperand()) {
    __ movzx_b(result, FieldOperand(string,
                                    SeqAsciiString::kHeaderSize + const_index));
  } else {
    __ movzx_b(result, FieldOperand(string,
                                    index,
                                    times_1,
                                    SeqAsciiString::kHeaderSize));
  }
  __ bind(&done);
  __ bind(deferred->exit());
}
3215
3216
// Slow path for DoStringCharCodeAt: calls Runtime::kStringCharCodeAt with
// the string and a smi-tagged index.  Runs with all registers saved in
// safepoint slots; the untagged result is written back to the result
// register's slot.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, Immediate(0));

  PushSafepointRegistersScope scope(this);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ push(Immediate(Smi::FromInt(const_index)));
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(eax);
  }
  // The runtime returns the character code as a smi in eax.
  __ SmiUntag(eax);
  __ StoreToSafepointRegisterSlot(result, eax);
}
3246
3247
// Converts a character code to a one-character string.  ASCII codes are
// looked up in the single-character string cache; a cache miss (undefined
// entry) or a non-ASCII code falls back to the runtime via deferred code.
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  // Non-ASCII character codes are handled by the runtime.
  __ cmp(char_code, String::kMaxAsciiCharCode);
  __ j(above, deferred->entry());
  __ Set(result, Immediate(factory()->single_character_string_cache()));
  __ mov(result, FieldOperand(result,
                              char_code, times_pointer_size,
                              FixedArray::kHeaderSize));
  // An undefined cache entry means the string is not cached: go slow.
  __ cmp(result, factory()->undefined_value());
  __ j(equal, deferred->entry());
  __ bind(deferred->exit());
}
3276
3277
// Slow path for DoStringCharFromCode: calls Runtime::kCharFromCode with
// the smi-tagged character code and stores the resulting string into the
// result register's safepoint slot.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, Immediate(0));

  PushSafepointRegistersScope scope(this);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr);
  __ StoreToSafepointRegisterSlot(result, eax);
}
3293
3294
Steve Block1e0659c2011-05-24 12:43:12 +01003295void LCodeGen::DoStringLength(LStringLength* instr) {
3296 Register string = ToRegister(instr->string());
3297 Register result = ToRegister(instr->result());
3298 __ mov(result, FieldOperand(string, String::kLengthOffset));
3299}
3300
3301
Ben Murdochb0fe1622011-05-05 13:52:32 +01003302void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003303 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003304 ASSERT(input->IsRegister() || input->IsStackSlot());
3305 LOperand* output = instr->result();
3306 ASSERT(output->IsDoubleRegister());
3307 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3308}
3309
3310
// Tags an int32 as a smi in place.  If tagging overflows (the value does
// not fit in 31 bits), deferred code boxes the value in a heap number.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  // SmiTag is a left shift by one; overflow means the value needs boxing.
  __ SmiTag(reg);
  __ j(overflow, deferred->entry());
  __ bind(deferred->exit());
}
3330
3331
// Slow path for DoNumberTagI: the int32 did not fit in a smi, so box it in
// a newly allocated heap number (inline allocation when enabled, runtime
// call otherwise).
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  Register tmp = reg.is(eax) ? ecx : eax;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  NearLabel done;
  // Recover the original value: the tag shift lost the sign bit, so untag
  // and flip bit 31 back.
  __ SmiUntag(reg);
  __ xor_(reg, 0x80000000);
  __ cvtsi2sd(xmm0, Operand(reg));
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
    __ jmp(&done);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ StoreToSafepointRegisterSlot(reg, Immediate(0));

  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
  if (!reg.is(eax)) __ mov(reg, eax);

  // Done. Put the value in xmm0 into the value of the allocated heap
  // number.
  __ bind(&done);
  __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
  __ StoreToSafepointRegisterSlot(reg, reg);
}
3369
3370
// Boxes a double value in a freshly allocated heap number.  Allocation is
// attempted inline when enabled; the deferred path calls the runtime.
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
   private:
    LNumberTagD* instr_;
  };

  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register reg = ToRegister(instr->result());
  Register tmp = ToRegister(instr->TempAt(0));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double payload into the allocated heap number.
  __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
}
3394
3395
3396void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3397 // TODO(3095996): Get rid of this. For now, we need to make the
3398 // result register contain a valid pointer because it is already
3399 // contained in the register pointer map.
3400 Register reg = ToRegister(instr->result());
3401 __ Set(reg, Immediate(0));
3402
Ben Murdoch8b112d22011-06-08 16:22:53 +01003403 PushSafepointRegistersScope scope(this);
3404 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003405 __ StoreToSafepointRegisterSlot(reg, eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003406}
3407
3408
3409void LCodeGen::DoSmiTag(LSmiTag* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003410 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003411 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3412 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3413 __ SmiTag(ToRegister(input));
3414}
3415
3416
3417void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003418 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003419 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3420 if (instr->needs_check()) {
3421 __ test(ToRegister(input), Immediate(kSmiTagMask));
3422 DeoptimizeIf(not_zero, instr->environment());
3423 }
3424 __ SmiUntag(ToRegister(input));
3425}
3426
3427
// Loads the tagged number in |input_reg| into |result_reg| as a double.
// Smis are untagged and converted (and retagged afterwards, so the input
// register is preserved); heap numbers load their double payload.  Any
// other value deoptimizes, except that when |deoptimize_on_undefined| is
// false, undefined converts to NaN.
void LCodeGen::EmitNumberUntagD(Register input_reg,
                                XMMRegister result_reg,
                                bool deoptimize_on_undefined,
                                LEnvironment* env) {
  NearLabel load_smi, done;

  // Smi check.
  __ test(input_reg, Immediate(kSmiTagMask));
  __ j(zero, &load_smi, not_taken);

  // Heap number map check.
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  if (deoptimize_on_undefined) {
    DeoptimizeIf(not_equal, env);
  } else {
    NearLabel heap_number;
    __ j(equal, &heap_number);
    // Only undefined is tolerated here; everything else deoptimizes.
    __ cmp(input_reg, factory()->undefined_value());
    DeoptimizeIf(not_equal, env);

    // Convert undefined to NaN.
    ExternalReference nan = ExternalReference::address_of_nan();
    __ movdbl(result_reg, Operand::StaticVariable(nan));
    __ jmp(&done);

    __ bind(&heap_number);
  }
  // Heap number to XMM conversion.
  __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ jmp(&done);

  // Smi to XMM conversion
  __ bind(&load_smi);
  __ SmiUntag(input_reg);  // Untag smi before converting to float.
  __ cvtsi2sd(result_reg, Operand(input_reg));
  __ SmiTag(input_reg);  // Retag smi.
  __ bind(&done);
}
3467
3468
// Deferred-code adapter that routes the out-of-line slow path of a
// tagged-to-int32 conversion to LCodeGen::DoDeferredTaggedToI.
class DeferredTaggedToI: public LDeferredCode {
 public:
  DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
      : LDeferredCode(codegen), instr_(instr) { }
  virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
 private:
  LTaggedToI* instr_;
};
3477
3478
// Slow path for DoTaggedToI: converts a non-smi tagged value to an int32
// in place.  Truncating conversions (JS bitwise-op semantics) accept heap
// numbers and undefined (-> 0); exact conversions deoptimize on precision
// loss, NaN, and (when required) a -0 result.
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  NearLabel done, heap_number;
  Register input_reg = ToRegister(instr->InputAt(0));

  // Heap number map check.
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());

  if (instr->truncating()) {
    __ j(equal, &heap_number);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ cmp(input_reg, factory()->undefined_value());
    DeoptimizeIf(not_equal, instr->environment());
    __ mov(input_reg, 0);
    __ jmp(&done);

    __ bind(&heap_number);
    if (CpuFeatures::IsSupported(SSE3)) {
      CpuFeatures::Scope scope(SSE3);
      NearLabel convert;
      // Use more powerful conversion when sse3 is available.
      // Load x87 register with heap number.
      __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
      // Get exponent alone and check for too-big exponent.
      __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
      __ and_(input_reg, HeapNumber::kExponentMask);
      const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
      __ j(less, &convert);
      // Pop FPU stack before deoptimizing.
      __ ffree(0);
      __ fincstp();
      DeoptimizeIf(no_condition, instr->environment());

      // Reserve space for 64 bit answer.
      __ bind(&convert);
      __ sub(Operand(esp), Immediate(kDoubleSize));
      // Do conversion, which cannot fail because we checked the exponent.
      __ fisttp_d(Operand(esp, 0));
      __ mov(input_reg, Operand(esp, 0));  // Low word of answer is the result.
      __ add(Operand(esp), Immediate(kDoubleSize));
    } else {
      NearLabel deopt;
      XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
      __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
      // cvttsd2si yields 0x80000000 on overflow or NaN.
      __ cvttsd2si(input_reg, Operand(xmm0));
      __ cmp(input_reg, 0x80000000u);
      __ j(not_equal, &done);
      // Check if the input was 0x80000000 (kMinInt).
      // If no, then we got an overflow and we deoptimize.
      ExternalReference min_int = ExternalReference::address_of_min_int();
      __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
      __ ucomisd(xmm_temp, xmm0);
      DeoptimizeIf(not_equal, instr->environment());
      DeoptimizeIf(parity_even, instr->environment());  // NaN.
    }
  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(not_equal, instr->environment());

    XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
    __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, Operand(xmm0));
    // Round-trip the value; any difference means precision was lost.
    __ cvtsi2sd(xmm_temp, Operand(input_reg));
    __ ucomisd(xmm0, xmm_temp);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result may be -0: check the sign bit of the double.
      __ test(input_reg, Operand(input_reg));
      __ j(not_zero, &done);
      __ movmskpd(input_reg, xmm0);
      __ and_(input_reg, 1);
      DeoptimizeIf(not_zero, instr->environment());
    }
  }
  __ bind(&done);
}
3558
3559
// Converts a tagged value to an int32 in place.  Smis are untagged
// inline; any other value goes through the deferred slow path.
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ test(input_reg, Immediate(kSmiTagMask));
  __ j(not_zero, deferred->entry());

  // Smi to int32 conversion
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}
3578
3579
3580void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003581 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003582 ASSERT(input->IsRegister());
3583 LOperand* result = instr->result();
3584 ASSERT(result->IsDoubleRegister());
3585
3586 Register input_reg = ToRegister(input);
3587 XMMRegister result_reg = ToDoubleRegister(result);
3588
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003589 EmitNumberUntagD(input_reg, result_reg,
3590 instr->hydrogen()->deoptimize_on_undefined(),
3591 instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003592}
3593
3594
// Converts a double to an int32.  Truncating mode implements the JS
// bitwise-operation semantics: out-of-range values are converted via an
// SSE3 fisttp or a manual bit-twiddling path instead of deoptimizing.
// Non-truncating mode deoptimizes if the double is not exactly
// representable as an int32 (or is -0 when that must be distinguished).
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsDoubleRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsRegister());

  XMMRegister input_reg = ToDoubleRegister(input);
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    __ cvttsd2si(result_reg, Operand(input_reg));
    // cvttsd2si yields 0x80000000 when the value is out of int32 range.
    __ cmp(result_reg, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE3)) {
      // This will deoptimize if the exponent of the input in out of range.
      CpuFeatures::Scope scope(SSE3);
      NearLabel convert, done;
      __ j(not_equal, &done);
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ movdbl(Operand(esp, 0), input_reg);
      // Get exponent alone and check for too-big exponent.
      __ mov(result_reg, Operand(esp, sizeof(int32_t)));
      __ and_(result_reg, HeapNumber::kExponentMask);
      const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
      __ j(less, &convert);
      __ add(Operand(esp), Immediate(kDoubleSize));
      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&convert);
      // Do conversion, which cannot fail because we checked the exponent.
      __ fld_d(Operand(esp, 0));
      __ fisttp_d(Operand(esp, 0));
      __ mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
      __ add(Operand(esp), Immediate(kDoubleSize));
      __ bind(&done);
    } else {
      NearLabel done;
      Register temp_reg = ToRegister(instr->TempAt(0));
      XMMRegister xmm_scratch = xmm0;

      // If cvttsd2si succeeded, we're done. Otherwise, we attempt
      // manual conversion.
      __ j(not_equal, &done);

      // Get high 32 bits of the input in result_reg and temp_reg.
      __ pshufd(xmm_scratch, input_reg, 1);
      __ movd(Operand(temp_reg), xmm_scratch);
      __ mov(result_reg, temp_reg);

      // Prepare negation mask in temp_reg.
      __ sar(temp_reg, kBitsPerInt - 1);

      // Extract the exponent from result_reg and subtract adjusted
      // bias from it. The adjustment is selected in a way such that
      // when the difference is zero, the answer is in the low 32 bits
      // of the input, otherwise a shift has to be performed.
      __ shr(result_reg, HeapNumber::kExponentShift);
      __ and_(result_reg,
              HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
      __ sub(Operand(result_reg),
             Immediate(HeapNumber::kExponentBias +
                       HeapNumber::kExponentBits +
                       HeapNumber::kMantissaBits));
      // Don't handle big (> kMantissaBits + kExponentBits == 63) or
      // special exponents.
      DeoptimizeIf(greater, instr->environment());

      // Zero out the sign and the exponent in the input (by shifting
      // it to the left) and restore the implicit mantissa bit,
      // i.e. convert the input to unsigned int64 shifted left by
      // kExponentBits.
      ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
      // Minus zero has the most significant bit set and the other
      // bits cleared.
      __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
      __ psllq(input_reg, HeapNumber::kExponentBits);
      __ por(input_reg, xmm_scratch);

      // Get the amount to shift the input right in xmm_scratch.
      __ neg(result_reg);
      __ movd(xmm_scratch, Operand(result_reg));

      // Shift the input right and extract low 32 bits.
      __ psrlq(input_reg, xmm_scratch);
      __ movd(Operand(result_reg), input_reg);

      // Use the prepared mask in temp_reg to negate the result if necessary.
      __ xor_(result_reg, Operand(temp_reg));
      __ sub(result_reg, Operand(temp_reg));
      __ bind(&done);
    }
  } else {
    NearLabel done;
    __ cvttsd2si(result_reg, Operand(input_reg));
    // Round-trip and compare to detect inexact conversions.
    __ cvtsi2sd(xmm0, Operand(result_reg));
    __ ucomisd(xmm0, input_reg);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // The integer converted back is equal to the original. We
      // only have to test if we got -0 as an input.
      __ test(result_reg, Operand(result_reg));
      __ j(not_zero, &done);
      __ movmskpd(result_reg, input_reg);
      // Bit 0 contains the sign of the double in input_reg.
      // If input was positive, we are ok and return 0, otherwise
      // deoptimize.
      __ and_(result_reg, 1);
      DeoptimizeIf(not_zero, instr->environment());
    }
    __ bind(&done);
  }
}
3710
3711
3712void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003713 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003714 __ test(ToRegister(input), Immediate(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01003715 DeoptimizeIf(not_zero, instr->environment());
3716}
3717
3718
3719void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
3720 LOperand* input = instr->InputAt(0);
3721 __ test(ToRegister(input), Immediate(kSmiTagMask));
3722 DeoptimizeIf(zero, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003723}
3724
3725
// Deoptimizes unless the heap object's instance type lies in the
// [first, last] interval.  Special-cases a single type (equality check)
// and the full string range (dedicated is-string bit).
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));
  InstanceType first = instr->hydrogen()->first();
  InstanceType last = instr->hydrogen()->last();

  // Load the map; the instance type lives in the map, not the object.
  __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));

  // If there is only one type in the interval check for equality.
  if (first == last) {
    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
            static_cast<int8_t>(first));
    DeoptimizeIf(not_equal, instr->environment());
  } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
    // String has a dedicated bit in instance type.
    __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask);
    DeoptimizeIf(not_zero, instr->environment());
  } else {
    // General interval: unsigned range check on the instance type byte.
    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
            static_cast<int8_t>(first));
    DeoptimizeIf(below, instr->environment());
    // Omit check for the last type.
    if (last != LAST_TYPE) {
      __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
              static_cast<int8_t>(last));
      DeoptimizeIf(above, instr->environment());
    }
  }
}
3755
3756
3757void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003758 ASSERT(instr->InputAt(0)->IsRegister());
3759 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003760 __ cmp(reg, instr->hydrogen()->target());
3761 DeoptimizeIf(not_equal, instr->environment());
3762}
3763
3764
3765void LCodeGen::DoCheckMap(LCheckMap* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003766 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003767 ASSERT(input->IsRegister());
3768 Register reg = ToRegister(input);
3769 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3770 instr->hydrogen()->map());
3771 DeoptimizeIf(not_equal, instr->environment());
3772}
3773
3774
// Loads the given heap object into |result|. Objects in new space are
// not embedded directly: they are referenced through a freshly allocated
// JSGlobalPropertyCell and loaded from the cell instead — presumably
// because new-space objects can move under GC while the cell provides a
// stable slot (NOTE(review): confirm against Factory documentation).
// Old-space objects are embedded as immediates.
void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand::Cell(cell));
  } else {
    __ mov(result, object);
  }
}
3784
3785
// Walks the prototype chain from instr->prototype() up to (and including)
// the holder, deoptimizing if any object on the chain no longer has the
// map it had at compile time. The chain itself is traversed at compile
// time via handles; only the map comparisons are emitted.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register reg = ToRegister(instr->TempAt(0));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(reg, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
           Handle<Map>(current_prototype->map()));
    DeoptimizeIf(not_equal, instr->environment());
    // Advance to the next prototype at compile time ...
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(reg, current_prototype);
  }

  // Check the holder map.
  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Handle<Map>(current_prototype->map()));
  DeoptimizeIf(not_equal, instr->environment());
}
3811
3812
// Creates an array literal. Pushes (literals array, literal index,
// constant elements) and then dispatches: copy-on-write or small shallow
// literals go through FastCloneShallowArrayStub; deep or oversized
// literals fall back to the runtime.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  // Setup the parameters to the stub/runtime call.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->constant_elements()));

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    // COW elements can be shared, so only a depth-1 (shallow) literal
    // may take this path.
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals require the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  }
}
3839
3840
// Creates an object literal. Pushes (literals array, literal index,
// constant properties, flags) and calls the deep or shallow runtime
// function depending on nesting depth. Expects the context in esi.
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  // Setup the parameters to the stub/runtime call.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->constant_properties()));
  // Encode fast-elements and has-function properties as a smi flag word.
  int flags = instr->hydrogen()->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= instr->hydrogen()->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow,
                4,
                instr,
                CONTEXT_ADJUSTED);
  }
}
3866
3867
// Converts the object in eax to fast-properties mode via the runtime.
// The runtime call's result is left in eax (the fixed input register).
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  __ push(eax);
  CallRuntime(Runtime::kToFastProperties, 1, instr, CONTEXT_ADJUSTED);
}
3873
3874
// Creates a regexp literal: fetch the boilerplate from the literals
// array (materializing it through the runtime the first time), then
// allocate a fresh JSRegExp and shallow-copy the boilerplate into it.
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  NearLabel materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // An undefined slot means the boilerplate has not been created yet.
  __ cmp(ebx, factory()->undefined_value());
  __ j(not_equal, &materialized);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->pattern()));
  __ push(Immediate(instr->hydrogen()->flags()));
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // Try inline allocation first; fall back to the runtime on failure.
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // ebx (the boilerplate) is caller-saved across the runtime call.
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  // Copy the trailing word when the object size is an odd number of
  // pointers (the unrolled loop handles pairs only).
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
}
3925
3926
// Creates a closure for a function literal. Non-pretenured functions
// without literals use the fast stub path; everything else goes through
// Runtime::kNewClosure with an explicit pretenure flag.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    // The stub variant is selected by the function's strict-mode flag.
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ push(Immediate(shared_info));
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  } else {
    // Slow path: push context, shared info and pretenure flag for the
    // runtime.
    __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
    __ push(Immediate(shared_info));
    __ push(Immediate(pretenure
                      ? factory()->true_value()
                      : factory()->false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT);
  }
}
3946
3947
3948void LCodeGen::DoTypeof(LTypeof* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003949 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003950 if (input->IsConstantOperand()) {
3951 __ push(ToImmediate(input));
3952 } else {
3953 __ push(ToOperand(input));
3954 }
Ben Murdoch8b112d22011-06-08 16:22:53 +01003955 CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003956}
3957
3958
3959void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003960 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003961 Register result = ToRegister(instr->result());
3962 Label true_label;
3963 Label false_label;
3964 NearLabel done;
3965
3966 Condition final_branch_condition = EmitTypeofIs(&true_label,
3967 &false_label,
3968 input,
3969 instr->type_literal());
3970 __ j(final_branch_condition, &true_label);
3971 __ bind(&false_label);
Steve Block44f0eee2011-05-26 01:26:41 +01003972 __ mov(result, factory()->false_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003973 __ jmp(&done);
3974
3975 __ bind(&true_label);
Steve Block44f0eee2011-05-26 01:26:41 +01003976 __ mov(result, factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003977
3978 __ bind(&done);
3979}
3980
3981
3982void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003983 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003984 int true_block = chunk_->LookupDestination(instr->true_block_id());
3985 int false_block = chunk_->LookupDestination(instr->false_block_id());
3986 Label* true_label = chunk_->GetAssemblyLabel(true_block);
3987 Label* false_label = chunk_->GetAssemblyLabel(false_block);
3988
3989 Condition final_branch_condition = EmitTypeofIs(true_label,
3990 false_label,
3991 input,
3992 instr->type_literal());
3993
3994 EmitBranch(true_block, false_block, final_branch_condition);
3995}
3996
3997
// Emits the code for (typeof input == type_name). The emitted code may
// jump directly to |true_label| or |false_label|; otherwise the caller
// must branch on the returned condition, which holds iff the comparison
// is true. |input| is clobbered in several branches (used as a scratch
// map register). An unknown type name returns not_equal after an
// unconditional jump to |false_label|, so the follow-up branch is dead.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis and heap numbers are 'number'.
    __ JumpIfSmi(input, true_label);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           factory()->heap_number_map());
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label);
    // Undetectable strings answer 'undefined', not 'string'.
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    // Only the true and false oddballs are 'boolean'.
    __ cmp(input, factory()->true_value());
    __ j(equal, true_label);
    __ cmp(input, factory()->false_value());
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ cmp(input, factory()->undefined_value());
    __ j(equal, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = not_zero;

  } else if (type_name->Equals(heap()->function_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
    __ j(equal, true_label);
    // Regular expressions => 'function' (they are callable).
    __ CmpInstanceType(input, JS_REGEXP_TYPE);
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    // null answers 'object'.
    __ cmp(input, factory()->null_value());
    __ j(equal, true_label);
    // Regular expressions => 'function', not 'object'.
    __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
    __ j(below, false_label);
    __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
    __ j(above_equal, false_label);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else {
    final_branch_condition = not_equal;
    __ jmp(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
4063
4064
Steve Block1e0659c2011-05-24 12:43:12 +01004065void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
4066 Register result = ToRegister(instr->result());
4067 NearLabel true_label;
4068 NearLabel false_label;
4069 NearLabel done;
4070
4071 EmitIsConstructCall(result);
4072 __ j(equal, &true_label);
4073
Steve Block44f0eee2011-05-26 01:26:41 +01004074 __ mov(result, factory()->false_value());
Steve Block1e0659c2011-05-24 12:43:12 +01004075 __ jmp(&done);
4076
4077 __ bind(&true_label);
Steve Block44f0eee2011-05-26 01:26:41 +01004078 __ mov(result, factory()->true_value());
Steve Block1e0659c2011-05-24 12:43:12 +01004079
4080 __ bind(&done);
4081}
4082
4083
4084void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4085 Register temp = ToRegister(instr->TempAt(0));
4086 int true_block = chunk_->LookupDestination(instr->true_block_id());
4087 int false_block = chunk_->LookupDestination(instr->false_block_id());
4088
4089 EmitIsConstructCall(temp);
4090 EmitBranch(true_block, false_block, equal);
4091}
4092
4093
// Sets the equal condition flag iff the calling frame is a construct
// frame. Clobbers |temp|. Callers branch on 'equal' afterwards.
void LCodeGen::EmitIsConstructCall(Register temp) {
  // Get the frame pointer for the calling frame.
  __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  NearLabel check_frame_marker;
  __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  // Adaptor frame present: look one frame further up.
  __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4110
4111
// Intentionally emits nothing: the instruction exists only so the
// environment at this point is captured for the safepoint's
// deoptimization data.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
4116
4117
// Unconditional deoptimization; no_condition asks DeoptimizeIf for an
// unconditional exit to the deoptimizer.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}
4121
4122
// Implements 'delete obj[key]' by invoking the DELETE builtin. The
// environment is registered for deoptimization before the call and a
// safepoint generator records the call site; the strict-mode flag is
// passed as an extra smi argument on the stack.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  __ push(ToOperand(obj));
  if (key->IsConstantOperand()) {
    __ push(ToImmediate(key));
  } else {
    __ push(ToOperand(key));
  }
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  RegisterEnvironmentForDeoptimization(env);
  // Create safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin)
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // Builtins expect the context in esi.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ push(Immediate(Smi::FromInt(strict_mode_flag())));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
4147
4148
// Emits a stack-limit check: when esp has grown below the isolate's
// stack limit, call the StackCheckStub; otherwise fall straight
// through.
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  NearLabel done;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &done);

  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
  __ bind(&done);
}
4161
4162
// Marks the on-stack-replacement entry point: registers the (spill-slot
// annotated) environment for deoptimization and records the current PC
// offset as the OSR entry. Emits no machine code.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
4178
4179
4180#undef __
4181
4182} } // namespace v8::internal
Ben Murdochb8e0da22011-05-16 14:20:40 +01004183
4184#endif // V8_TARGET_ARCH_IA32