// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "ia32/lithium-codegen-ia32.h"
#include "code-stubs.h"
#include "deoptimizer.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) {}
  virtual ~SafepointGenerator() { }

  virtual void BeforeCall(int call_size) const {}

  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};
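
// Illustrative sketch (not part of the original file): the generator above is
// normally constructed on the stack at a call site and handed to the macro
// assembler as the CallWrapper of an invoke sequence, so that AfterCall()
// records a safepoint right after the generated call instruction. A
// hypothetical call site, assuming an LInstruction* instr with a pointer map:
//
//   SafepointGenerator safepoint_generator(
//       this, instr->pointer_map(), Safepoint::kLazyDeopt);
//   ParameterCount actual(eax);
//   __ InvokeFunction(edi, actual, CALL_FUNCTION,
//                     safepoint_generator, CALL_AS_METHOD);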


#define __ masm()->

bool LCodeGen::GenerateCode() {
  HPhase phase("Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  CpuFeatures::Scope scope(SSE2);
  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}


void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  status_ = ABORTED;
}


void LCodeGen::Comment(const char* format, ...) {
  if (!FLAG_code_comments) return;
  char buffer[4 * KB];
  StringBuilder builder(buffer, ARRAY_SIZE(buffer));
  va_list arguments;
  va_start(arguments, format);
  builder.AddFormattedList(format, arguments);
  va_end(arguments);

  // Copy the string before recording it in the assembler to avoid
  // issues when the stack allocated buffer goes out of scope.
  size_t length = builder.position();
  Vector<char> copy = Vector<char>::New(length + 1);
  memcpy(copy.start(), builder.Finalize(), copy.length());
  masm()->RecordComment(copy.start());
}


bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (info_->is_strict_mode() || info_->is_native()) {
    Label ok;
    __ test(ecx, Operand(ecx));
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      __ mov(Operand(eax), Immediate(slots));
      Label loop;
      __ bind(&loop);
      __ push(Immediate(kSlotsZapValue));
      __ dec(eax);
      __ j(not_zero, &loop);
    } else {
      __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On Windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ mov(Operand(esp, offset), eax);
      }
#endif
    }
  }
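
  // Illustrative sketch (not part of the original file): the #ifdef _MSC_VER
  // loop above is a stack probe. After the large esp adjustment, writing one
  // word per 4 KB page from the high end of the new area downwards keeps every
  // page within one guard-page step of an already committed page. The same
  // idea in plain C (hypothetical helper, for illustration only):
  //
  //   void TouchStackPages(volatile char* base, int size) {
  //     const int kPageSize = 4 * KB;
  //     for (int offset = size - kPageSize; offset > 0; offset -= kPageSize) {
  //       base[offset] = 0;  // Touch one byte in each page.
  //     }
  //   }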

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Context is returned in both eax and esi. It replaces the context
    // passed to us. It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use a third register to avoid
        // clobbering esi.
        __ mov(ecx, esi);
        __ RecordWrite(ecx, context_offset, eax, ebx);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    // We have not executed any compiled code yet, so esi still holds the
    // incoming context.
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  EnsureSpaceForLazyDeopt();
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      code->Generate();
      __ jmp(code->exit());
    }
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
         value->Number());
  return static_cast<int32_t>(value->Number());
}


Immediate LCodeGen::ToImmediate(LOperand* op) {
  LConstantOperand* const_op = LConstantOperand::cast(op);
  Handle<Object> literal = chunk_->LookupLiteral(const_op);
  Representation r = chunk_->LookupLiteralRepresentation(const_op);
  if (r.IsInteger32()) {
    ASSERT(literal->IsNumber());
    return Immediate(static_cast<int32_t>(literal->Number()));
  } else if (r.IsDouble()) {
    Abort("unsupported double immediate");
  }
  ASSERT(r.IsTagged());
  return Immediate(literal);
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(ebp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(ebp, -(index - 1) * kPointerSize);
  }
}
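
// Illustrative sketch (not part of the original file): worked examples for the
// offset computation above, assuming kPointerSize == 4. Non-negative indices
// are spill slots below the fixed frame (saved ebp at offset 0, context at -4,
// function at -8); negative indices are incoming parameters above the return
// address:
//
//   index  0  ->  -(0 + 3) * 4  == -12   // first spill slot
//   index  1  ->  -(1 + 3) * 4  == -16   // second spill slot
//   index -1  ->  -(-1 - 1) * 4 ==  +8   // first incoming parameter
//   index -2  ->  -(-2 - 1) * 4 == +12   // second incoming parameter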


Operand LCodeGen::HighOperand(LOperand* op) {
  ASSERT(op->IsDoubleStackSlot());
  int index = op->index();
  int offset = (index >= 0) ? index + 3 : index - 1;
  return Operand(ebp, -offset * kPointerSize);
}


void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  translation->BeginFrame(environment->ast_id(), closure_id, height);
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}


void LCodeGen::AddToTranslation(Translation* translation,
                                LOperand* op,
                                bool is_tagged) {
  if (op == NULL) {
    // TODO(twuerthinger): Introduce marker operands to indicate that this value
    // is not present and must be reconstructed from the deoptimizer. Currently
    // this is only used for the arguments object.
    translation->StoreArgumentsObject();
  } else if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsArgument()) {
    ASSERT(is_tagged);
    int src_index = GetStackSlotCount() + op->index();
    translation->StoreStackSlot(src_index);
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(literal);
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  __ call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntime(const Runtime::Function* fun,
                           int argc,
                           LInstruction* instr) {
  ASSERT(instr != NULL);
  ASSERT(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  __ CallRuntime(fun, argc);

  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr,
                                       LOperand* context) {
  ASSERT(context->IsRegister() || context->IsStackSlot());
  if (context->IsRegister()) {
    if (!ToRegister(context).is(esi)) {
      __ mov(esi, ToRegister(context));
    }
  } else {
    // Context is stack slot.
    __ mov(esi, ToOperand(context));
  }

  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}


void LCodeGen::RegisterEnvironmentForDeoptimization(
    LEnvironment* environment, Safepoint::DeoptMode mode) {
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------ translation_size ------------<|

    int frame_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
    }
    Translation translation(&translations_, frame_count);
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment);
  }
}
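
// Illustrative sketch (not part of the original file): frame_count above is
// the depth of the environment chain, one frame per function still live at
// this point after inlining. For a call inlined two levels deep the chain has
// three environments and WriteTranslation() emits three nested frames,
// outermost first:
//
//   LEnvironment* env = instr->environment();  // innermost, current function
//   env->outer();                              // inlined caller
//   env->outer()->outer();                     // outermost optimized frame
//   // env->outer()->outer()->outer() == NULL, which terminates the loop.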


void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  ASSERT(entry != NULL);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  if (FLAG_deopt_every_n_times != 0) {
    Handle<SharedFunctionInfo> shared(info_->shared_info());
    Label no_deopt;
    __ pushfd();
    __ push(eax);
    __ push(ebx);
    __ mov(ebx, shared);
    __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
    __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    __ j(not_zero, &no_deopt, Label::kNear);
    if (FLAG_trap_on_deopt) __ int3();
    __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);

    __ bind(&no_deopt);
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
  }

  if (cc == no_condition) {
    if (FLAG_trap_on_deopt) __ int3();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      Label done;
      __ j(NegateCondition(cc), &done, Label::kNear);
      __ int3();
      __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ j(cc, entry, RelocInfo::RUNTIME_ENTRY);
    }
  }
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  ASSERT(FLAG_deopt);
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal);
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  ASSERT(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  ASSERT(kind == expected_safepoint_kind_);
  const ZoneList<LOperand*>* operands = pointers->operands();
  Safepoint safepoint =
      safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer));
    }
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, mode);
}


void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
  LPointerMap empty_pointers(RelocInfo::kNoPosition);
  RecordSafepoint(&empty_pointers, mode);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode mode) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode);
}


void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}


void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }
}


void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}


void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ test(dividend, Operand(dividend));
    __ j(not_sign, &positive_dividend, Label::kNear);
    __ neg(dividend);
    __ and_(dividend, divisor - 1);
    __ neg(dividend);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ j(not_zero, &done, Label::kNear);
      DeoptimizeIf(no_condition, instr->environment());
    } else {
      __ jmp(&done, Label::kNear);
    }
    __ bind(&positive_dividend);
    __ and_(dividend, divisor - 1);
    __ bind(&done);
  } else {
    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
    Register left_reg = ToRegister(instr->InputAt(0));
    Register right_reg = ToRegister(instr->InputAt(1));
    Register result_reg = ToRegister(instr->result());

    ASSERT(left_reg.is(eax));
    ASSERT(result_reg.is(edx));
    ASSERT(!right_reg.is(eax));
    ASSERT(!right_reg.is(edx));

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      __ test(right_reg, Operand(right_reg));
      DeoptimizeIf(zero, instr->environment());
    }

    __ test(left_reg, Operand(left_reg));
    __ j(zero, &remainder_eq_dividend, Label::kNear);
    __ j(sign, &slow, Label::kNear);

    __ test(right_reg, Operand(right_reg));
    __ j(not_sign, &both_positive, Label::kNear);
    // The sign of the divisor doesn't matter.
    __ neg(right_reg);

    __ bind(&both_positive);
    // If the dividend is smaller than the nonnegative
    // divisor, the dividend is the result.
    __ cmp(left_reg, Operand(right_reg));
    __ j(less, &remainder_eq_dividend, Label::kNear);

    // Check if the divisor is a PowerOfTwo integer.
    Register scratch = ToRegister(instr->TempAt(0));
    __ mov(scratch, right_reg);
    __ sub(Operand(scratch), Immediate(1));
    __ test(scratch, Operand(right_reg));
    __ j(not_zero, &do_subtraction, Label::kNear);
    __ and_(left_reg, Operand(scratch));
    __ jmp(&remainder_eq_dividend, Label::kNear);

    __ bind(&do_subtraction);
    const int kUnfolds = 3;
    // Try a few subtractions of the dividend.
    __ mov(scratch, left_reg);
    for (int i = 0; i < kUnfolds; i++) {
      // Reduce the dividend by the divisor.
      __ sub(left_reg, Operand(right_reg));
      // Check if the dividend is less than the divisor.
      __ cmp(left_reg, Operand(right_reg));
      __ j(less, &remainder_eq_dividend, Label::kNear);
    }
    __ mov(left_reg, scratch);

    // Slow case, using idiv instruction.
    __ bind(&slow);
    // Sign extend to edx.
    __ cdq();

    // Check for (0 % -x) that will produce negative zero.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      Label positive_left;
      Label done;
      __ test(left_reg, Operand(left_reg));
      __ j(not_sign, &positive_left, Label::kNear);
      __ idiv(right_reg);

      // Test the remainder for 0, because then the result would be -0.
      __ test(result_reg, Operand(result_reg));
      __ j(not_zero, &done, Label::kNear);

      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&positive_left);
      __ idiv(right_reg);
      __ bind(&done);
    } else {
      __ idiv(right_reg);
    }
    __ jmp(&done, Label::kNear);

    __ bind(&remainder_eq_dividend);
    __ mov(result_reg, left_reg);

    __ bind(&done);
  }
}
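
// Illustrative sketch (not part of the original file): the power-of-two fast
// path above computes dividend % divisor with a mask, negating around the mask
// for negative dividends so the remainder keeps the dividend's sign (truncated
// division, as JS % requires). The equivalent in plain C, assuming the
// dividend is not kMinInt (hypothetical helper, for illustration only):
//
//   int32_t ModPowerOf2(int32_t dividend, int32_t divisor) {
//     int32_t mask = (divisor < 0 ? -divisor : divisor) - 1;
//     if (dividend < 0) return -(-dividend & mask);
//     return dividend & mask;
//   }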


void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(eax));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register left_reg = eax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ test(left_reg, Operand(left_reg));
    __ j(not_zero, &left_not_zero, Label::kNear);
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1).
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left_reg, kMinInt);
    __ j(not_zero, &left_not_min_int, Label::kNear);
    __ cmp(right_reg, -1);
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to edx.
  __ cdq();
  __ idiv(right_reg);

  // Deoptimize if remainder is not 0.
  __ test(edx, Operand(edx));
  DeoptimizeIf(not_zero, instr->environment());
}


void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ mov(ToRegister(instr->TempAt(0)), left);
  }

  if (right->IsConstantOperand()) {
    // Try strength reductions on the multiplication.
    // All replacement instructions are at most as long as the imul
    // and have better latency.
    int constant = ToInteger32(LConstantOperand::cast(right));
    if (constant == -1) {
      __ neg(left);
    } else if (constant == 0) {
      __ xor_(left, Operand(left));
    } else if (constant == 2) {
      __ add(left, Operand(left));
    } else if (!instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
      // If we know that the multiplication can't overflow, it's safe to
      // use instructions that don't set the overflow flag for the
      // multiplication.
      switch (constant) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ lea(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shl(left, 2);
          break;
        case 5:
          __ lea(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shl(left, 3);
          break;
        case 9:
          __ lea(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shl(left, 4);
          break;
        default:
          __ imul(left, left, constant);
          break;
      }
    } else {
      __ imul(left, left, constant);
    }
  } else {
    __ imul(left, ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ test(left, Operand(left));
    __ j(not_zero, &done, Label::kNear);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}
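
// Illustrative sketch (not part of the original file): the constant cases
// above are standard x86 strength reductions. Algebraically, for a value x:
//
//   x * -1 == -x            x * 0 == x ^ x          x * 2  == x + x
//   x * 3  == lea x + x*2   x * 5 == lea x + x*4    x * 9  == lea x + x*8
//   x * 4  == x << 2        x * 8 == x << 3         x * 16 == x << 4
//
// Unlike imul, the lea/shl forms do not set the overflow flag, which is why
// they are only used when the instruction is known not to overflow.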


void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), right_operand);
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), right_operand);
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), right_operand);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(ecx));

    switch (instr->op()) {
      case Token::SAR:
        __ sar_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shr_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        }
        break;
      case Token::SHL:
        __ shl_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sar(ToRegister(left), shift_count);
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        } else {
          __ shr(ToRegister(left), shift_count);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shl(ToRegister(left), shift_count);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ sub(ToOperand(left), ToImmediate(right));
  } else {
    __ sub(ToRegister(left), ToOperand(right));
  }
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  // Use xor to produce +0.0 in a fast and compact way, but avoid doing
  // so if the constant is -0.0.
  if (BitCast<uint64_t, double>(v) == 0) {
    __ xorps(res, res);
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    uint64_t int_val = BitCast<uint64_t, double>(v);
    int32_t lower = static_cast<int32_t>(int_val);
    int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatures::Scope scope(SSE4_1);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(res, Operand(temp));
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      } else {
        __ xorps(res, res);
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      }
    } else {
      __ Set(temp, Immediate(upper));
      __ movd(res, Operand(temp));
      __ psllq(res, 32);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(xmm0, Operand(temp));
        __ por(res, xmm0);
      }
    }
  }
}
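
// Illustrative sketch (not part of the original file): the zero check above is
// on the raw 64-bit pattern, so only +0.0 (all bits zero) takes the xorps fast
// path; -0.0 has the sign bit set and must be materialized through the integer
// pipeline like any other constant. For example:
//
//   BitCast<uint64_t, double>(0.0)  == 0x0000000000000000  // xorps suffices
//   BitCast<uint64_t, double>(-0.0) == 0x8000000000000000  // needs movd/pinsrd
//   BitCast<uint64_t, double>(1.0)  == 0x3FF0000000000000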


void LCodeGen::DoConstantT(LConstantT* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}


void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
}


void LCodeGen::DoFixedArrayBaseLength(
    LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}


void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ mov(result, FieldOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(result, FieldOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ and_(result, Map::kElementsKindMask);
  __ shr(result, Map::kElementsKindShift);
}


void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));
  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(input, &done, Label::kNear);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
  __ j(not_equal, &done, Label::kNear);
  __ mov(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}


void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}


void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToOperand(instr->value()));
  ASSERT(ToRegister(instr->context()).is(esi));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}


void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ add(ToOperand(left), ToImmediate(right));
  } else {
    __ add(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}


void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->InputAt(0));
  XMMRegister right = ToDoubleRegister(instr->InputAt(1));
  XMMRegister result = ToDoubleRegister(instr->result());
  // Modulo uses a fixed result register.
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      break;
    case Token::MOD: {
      // Pass two doubles as arguments on the stack.
      __ PrepareCallCFunction(4, eax);
      __ movdbl(Operand(esp, 0 * kDoubleSize), left);
      __ movdbl(Operand(esp, 1 * kDoubleSize), right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          4);

      // Return value is in st(0) on ia32.
      // Store it into the (fixed) result register.
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ fstp_d(Operand(esp, 0));
      __ movdbl(result, Operand(esp, 0));
      __ add(Operand(esp), Immediate(kDoubleSize));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->left()).is(edx));
  ASSERT(ToRegister(instr->right()).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}


int LCodeGen::GetNextEmittedBlock(int block) {
  for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
    LLabel* label = chunk_->GetLabel(i);
    if (!label->HasReplacement()) return i;
  }
  return -1;
}


void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ jmp(chunk_->GetAssemblyLabel(right_block));
  }
}


void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ test(reg, Operand(reg));
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorps(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      __ cmp(reg, factory()->true_value());
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      __ test(reg, Operand(reg));
      EmitBranch(true_block, false_block, not_equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ cmp(reg, factory()->undefined_value());
        __ j(equal, false_label);
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // true -> true.
        __ cmp(reg, factory()->true_value());
        __ j(equal, true_label);
        // false -> false.
        __ cmp(reg, factory()->false_value());
        __ j(equal, false_label);
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ cmp(reg, factory()->null_value());
        __ j(equal, false_label);
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ test(reg, Operand(reg));
        __ j(equal, false_label);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ test(reg, Immediate(kSmiTagMask));
        DeoptimizeIf(zero, instr->environment());
      }

      Register map = no_reg;  // Keep the compiler happy.
      if (expected.NeedsMap()) {
        map = ToRegister(instr->TempAt(0));
        ASSERT(!map.is(reg));
        __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));

        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ test_b(FieldOperand(map, Map::kBitFieldOffset),
                    1 << Map::kIsUndetectable);
          __ j(not_zero, false_label);
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // spec object -> true.
        __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
        __ j(above_equal, true_label);
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
        __ j(above_equal, &not_string, Label::kNear);
        __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
        __ j(not_zero, true_label);
        __ jmp(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        Label not_heap_number;
        __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
               factory()->heap_number_map());
        __ j(not_equal, &not_heap_number, Label::kNear);
        __ fldz();
        __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
        __ FCmp();
        __ j(zero, false_label);
        __ jmp(true_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(no_condition, instr->environment());
    }
  }
}
1453
1454
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001455void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001456 block = chunk_->LookupDestination(block);
1457 int next_block = GetNextEmittedBlock(current_block_);
1458 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001459 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001460 }
1461}
1462
1463
Ben Murdochb0fe1622011-05-05 13:52:32 +01001464void LCodeGen::DoGoto(LGoto* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001465 EmitGoto(instr->block_id());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001466}
1467
1468
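// Maps a comparison token to the ia32 condition to branch on. The unsigned
// variants (below/above) are used both for unsigned integer comparisons and
// for double comparisons, since ucomisd sets the flags like an unsigned
// compare.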
1469Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1470 Condition cond = no_condition;
1471 switch (op) {
1472 case Token::EQ:
1473 case Token::EQ_STRICT:
1474 cond = equal;
1475 break;
1476 case Token::LT:
1477 cond = is_unsigned ? below : less;
1478 break;
1479 case Token::GT:
1480 cond = is_unsigned ? above : greater;
1481 break;
1482 case Token::LTE:
1483 cond = is_unsigned ? below_equal : less_equal;
1484 break;
1485 case Token::GTE:
1486 cond = is_unsigned ? above_equal : greater_equal;
1487 break;
1488 case Token::IN:
1489 case Token::INSTANCEOF:
1490 default:
1491 UNREACHABLE();
1492 }
1493 return cond;
1494}
1495
1496
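// Integer compare helper: a constant right operand is emitted as an immediate
// so the left operand may be in memory; otherwise the left operand must be in
// a register and the right operand may be in memory.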
Ben Murdoch85b71792012-04-11 18:30:58 +01001497void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1498 if (right->IsConstantOperand()) {
1499 __ cmp(ToOperand(left), ToImmediate(right));
1500 } else {
1501 __ cmp(ToRegister(left), ToOperand(right));
1502 }
1503}
1504
1505
Ben Murdochb0fe1622011-05-05 13:52:32 +01001506void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001507 LOperand* left = instr->InputAt(0);
1508 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001509 int false_block = chunk_->LookupDestination(instr->false_block_id());
1510 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001511
Ben Murdoch85b71792012-04-11 18:30:58 +01001512 if (instr->is_double()) {
1513 // Don't base result on EFLAGS when a NaN is involved. Instead
1514 // jump to the false block.
1515 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1516 __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001517 } else {
Ben Murdoch85b71792012-04-11 18:30:58 +01001518 EmitCmpI(left, right);
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001519 }
Ben Murdoch85b71792012-04-11 18:30:58 +01001520
1521 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1522 EmitBranch(true_block, false_block, cc);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001523}
1524
1525
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001526void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001527 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001528 Operand right = ToOperand(instr->InputAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001529 int false_block = chunk_->LookupDestination(instr->false_block_id());
1530 int true_block = chunk_->LookupDestination(instr->true_block_id());
1531
1532 __ cmp(left, Operand(right));
1533 EmitBranch(true_block, false_block, equal);
1534}
1535
1536
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001537void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001538 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00001539 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001540 int false_block = chunk_->LookupDestination(instr->false_block_id());
Ben Murdoch257744e2011-11-30 15:57:28 +00001541
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001542 __ cmp(left, instr->hydrogen()->right());
Ben Murdoch257744e2011-11-30 15:57:28 +00001543 EmitBranch(true_block, false_block, equal);
1544}
1545
1546
Ben Murdoch85b71792012-04-11 18:30:58 +01001547void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001548 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001549
Ben Murdoch85b71792012-04-11 18:30:58 +01001550 // TODO(fsc): If the expression is known to be a smi, then it's
1551 // definitely not null. Jump to the false block.
Ben Murdoch592a9fc2012-03-05 11:04:45 +00001552
1553 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch85b71792012-04-11 18:30:58 +01001554 int false_block = chunk_->LookupDestination(instr->false_block_id());
1555
1556 __ cmp(reg, factory()->null_value());
1557 if (instr->is_strict()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001558 EmitBranch(true_block, false_block, equal);
1559 } else {
1560 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1561 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1562 __ j(equal, true_label);
Ben Murdoch85b71792012-04-11 18:30:58 +01001563 __ cmp(reg, factory()->undefined_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001564 __ j(equal, true_label);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001565 __ JumpIfSmi(reg, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001566 // Check for undetectable objects by looking in the bit field in
1567 // the map. The object has already been smi checked.
Ben Murdochb8e0da22011-05-16 14:20:40 +01001568 Register scratch = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001569 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1570 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1571 __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1572 EmitBranch(true_block, false_block, not_zero);
1573 }
1574}
1575
1576
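// Branches to is_not_object for smis and undetectable objects, treats null as
// an object, and otherwise checks that the instance type lies in the
// non-callable spec-object range. The returned condition (below_equal) holds
// when the value is an object.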
1577Condition LCodeGen::EmitIsObject(Register input,
1578 Register temp1,
Ben Murdochb0fe1622011-05-05 13:52:32 +01001579 Label* is_not_object,
1580 Label* is_object) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001581 __ JumpIfSmi(input, is_not_object);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001582
Steve Block44f0eee2011-05-26 01:26:41 +01001583 __ cmp(input, isolate()->factory()->null_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001584 __ j(equal, is_object);
1585
1586 __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
1587 // Undetectable objects behave like undefined.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001588 __ test_b(FieldOperand(temp1, Map::kBitFieldOffset),
1589 1 << Map::kIsUndetectable);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001590 __ j(not_zero, is_not_object);
1591
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001592 __ movzx_b(temp1, FieldOperand(temp1, Map::kInstanceTypeOffset));
1593 __ cmp(temp1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001594 __ j(below, is_not_object);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001595 __ cmp(temp1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001596 return below_equal;
1597}
1598
1599
Ben Murdochb0fe1622011-05-05 13:52:32 +01001600void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001601 Register reg = ToRegister(instr->InputAt(0));
1602 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001603
1604 int true_block = chunk_->LookupDestination(instr->true_block_id());
1605 int false_block = chunk_->LookupDestination(instr->false_block_id());
1606 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1607 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1608
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001609 Condition true_cond = EmitIsObject(reg, temp, false_label, true_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001610
1611 EmitBranch(true_block, false_block, true_cond);
1612}
1613
1614
Ben Murdochb0fe1622011-05-05 13:52:32 +01001615void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001616 Operand input = ToOperand(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001617
1618 int true_block = chunk_->LookupDestination(instr->true_block_id());
1619 int false_block = chunk_->LookupDestination(instr->false_block_id());
1620
1621 __ test(input, Immediate(kSmiTagMask));
1622 EmitBranch(true_block, false_block, zero);
1623}
1624
1625
Ben Murdoch257744e2011-11-30 15:57:28 +00001626void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1627 Register input = ToRegister(instr->InputAt(0));
1628 Register temp = ToRegister(instr->TempAt(0));
1629
1630 int true_block = chunk_->LookupDestination(instr->true_block_id());
1631 int false_block = chunk_->LookupDestination(instr->false_block_id());
1632
1633 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001634 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
Ben Murdoch257744e2011-11-30 15:57:28 +00001635 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
1636 __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
1637 1 << Map::kIsUndetectable);
1638 EmitBranch(true_block, false_block, not_zero);
1639}
1640
1641
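// HHasInstanceTypeAndBranch tests an instance-type interval that is
// restricted to shapes needing a single compare: one exact type, everything
// at or above a type (to == LAST_TYPE), or everything at or below a type
// (from == FIRST_TYPE). TestType picks the boundary type and BranchCondition
// the matching condition.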
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001642static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001643 InstanceType from = instr->from();
1644 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001645 if (from == FIRST_TYPE) return to;
1646 ASSERT(from == to || to == LAST_TYPE);
1647 return from;
1648}
1649
1650
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001651static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001652 InstanceType from = instr->from();
1653 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001654 if (from == to) return equal;
1655 if (to == LAST_TYPE) return above_equal;
1656 if (from == FIRST_TYPE) return below_equal;
1657 UNREACHABLE();
1658 return equal;
1659}
1660
1661
Ben Murdochb0fe1622011-05-05 13:52:32 +01001662void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001663 Register input = ToRegister(instr->InputAt(0));
1664 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001665
1666 int true_block = chunk_->LookupDestination(instr->true_block_id());
1667 int false_block = chunk_->LookupDestination(instr->false_block_id());
1668
1669 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1670
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001671 __ JumpIfSmi(input, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001672
Ben Murdochb8e0da22011-05-16 14:20:40 +01001673 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1674 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001675}
1676
1677
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001678void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1679 Register input = ToRegister(instr->InputAt(0));
1680 Register result = ToRegister(instr->result());
1681
1682 if (FLAG_debug_code) {
1683 __ AbortIfNotString(input);
1684 }
1685
1686 __ mov(result, FieldOperand(input, String::kHashFieldOffset));
1687 __ IndexFromHash(result, result);
1688}
1689
1690
Ben Murdochb0fe1622011-05-05 13:52:32 +01001691void LCodeGen::DoHasCachedArrayIndexAndBranch(
1692 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001693 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001694
1695 int true_block = chunk_->LookupDestination(instr->true_block_id());
1696 int false_block = chunk_->LookupDestination(instr->false_block_id());
1697
1698 __ test(FieldOperand(input, String::kHashFieldOffset),
1699 Immediate(String::kContainsCachedArrayIndexMask));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001700 EmitBranch(true_block, false_block, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001701}
1702
1703
1704// Branches to a label or falls through with the answer in the z flag. Trashes
Ben Murdoch85b71792012-04-11 18:30:58 +01001705// the temp registers, but not the input. Only input and temp2 may alias.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001706void LCodeGen::EmitClassOfTest(Label* is_true,
1707 Label* is_false,
1708 Handle<String> class_name,
1709 Register input,
1710 Register temp,
1711 Register temp2) {
1712 ASSERT(!input.is(temp));
Ben Murdoch85b71792012-04-11 18:30:58 +01001713 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001714 __ JumpIfSmi(input, is_false);
Ben Murdoch85b71792012-04-11 18:30:58 +01001715 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
1716 __ j(below, is_false);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001717
Ben Murdoch85b71792012-04-11 18:30:58 +01001718 // Map is now in temp.
1719 // Functions have class 'Function'.
1720 __ CmpInstanceType(temp, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001721 if (class_name->IsEqualTo(CStrVector("Function"))) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001722 __ j(above_equal, is_true);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001723 } else {
Ben Murdoch85b71792012-04-11 18:30:58 +01001724 __ j(above_equal, is_false);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001725 }
1726
1727 // Check if the constructor in the map is a function.
1728 __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
Ben Murdoch85b71792012-04-11 18:30:58 +01001729
1730 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
1731 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1732 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1733 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1734 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1735 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
1736
Ben Murdochb0fe1622011-05-05 13:52:32 +01001737 // Objects with a non-function constructor have class 'Object'.
1738 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1739 if (class_name->IsEqualTo(CStrVector("Object"))) {
1740 __ j(not_equal, is_true);
1741 } else {
1742 __ j(not_equal, is_false);
1743 }
1744
1745 // temp now contains the constructor function. Grab the
1746 // instance class name from there.
1747 __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1748 __ mov(temp, FieldOperand(temp,
1749 SharedFunctionInfo::kInstanceClassNameOffset));
1750 // The class name we are testing against is a symbol because it's a literal.
1751 // The name in the constructor is a symbol because of the way the context is
1752 // booted. This routine isn't expected to work for random API-created
1753 // classes and it doesn't have to because you can't access it with natives
1754 // syntax. Since both sides are symbols it is sufficient to use an identity
1755 // comparison.
1756 __ cmp(temp, class_name);
1757 // End with the answer in the z flag.
1758}
1759
1760
Ben Murdochb0fe1622011-05-05 13:52:32 +01001761void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001762 Register input = ToRegister(instr->InputAt(0));
1763 Register temp = ToRegister(instr->TempAt(0));
1764 Register temp2 = ToRegister(instr->TempAt(1));
Ben Murdoch85b71792012-04-11 18:30:58 +01001765 if (input.is(temp)) {
1766 // Swap.
1767 Register swapper = temp;
1768 temp = temp2;
1769 temp2 = swapper;
1770 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001771 Handle<String> class_name = instr->hydrogen()->class_name();
1772
1773 int true_block = chunk_->LookupDestination(instr->true_block_id());
1774 int false_block = chunk_->LookupDestination(instr->false_block_id());
1775
1776 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1777 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1778
1779 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1780
1781 EmitBranch(true_block, false_block, equal);
1782}
1783
1784
1785void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001786 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001787 int true_block = instr->true_block_id();
1788 int false_block = instr->false_block_id();
1789
1790 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1791 EmitBranch(true_block, false_block, equal);
1792}
1793
1794
1795void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001796 // Object and function are in fixed registers defined by the stub.
Steve Block1e0659c2011-05-24 12:43:12 +01001797 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001798 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001799 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001800
Ben Murdoch257744e2011-11-30 15:57:28 +00001801 Label true_value, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001802 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00001803 __ j(zero, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001804 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00001805 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001806 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01001807 __ mov(ToRegister(instr->result()), factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001808 __ bind(&done);
1809}
1810
1811
Ben Murdoch086aeea2011-05-13 15:57:08 +01001812void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1813 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1814 public:
1815 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1816 LInstanceOfKnownGlobal* instr)
1817 : LDeferredCode(codegen), instr_(instr) { }
1818 virtual void Generate() {
Ben Murdoch2b4ba112012-01-20 14:57:15 +00001819 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001820 }
Ben Murdoch85b71792012-04-11 18:30:58 +01001821
Ben Murdoch086aeea2011-05-13 15:57:08 +01001822 Label* map_check() { return &map_check_; }
Ben Murdoch85b71792012-04-11 18:30:58 +01001823
Ben Murdoch086aeea2011-05-13 15:57:08 +01001824 private:
1825 LInstanceOfKnownGlobal* instr_;
1826 Label map_check_;
1827 };
1828
1829 DeferredInstanceOfKnownGlobal* deferred;
1830 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1831
1832 Label done, false_result;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001833 Register object = ToRegister(instr->InputAt(1));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001834 Register temp = ToRegister(instr->TempAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01001835
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001836 // A Smi is not an instance of anything.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001837 __ JumpIfSmi(object, &false_result);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001838
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001839 // This is the inlined call site instanceof cache. The two occurrences of the
Ben Murdoch086aeea2011-05-13 15:57:08 +01001840 // hole value will be patched to the last map/result pair generated by the
1841 // instanceof stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00001842 Label cache_miss;
Ben Murdochb8e0da22011-05-16 14:20:40 +01001843 Register map = ToRegister(instr->TempAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01001844 __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
1845 __ bind(deferred->map_check()); // Label for calculating code patching.
Ben Murdoch85b71792012-04-11 18:30:58 +01001846 __ cmp(map, factory()->the_hole_value()); // Patched to cached map.
Ben Murdoch257744e2011-11-30 15:57:28 +00001847 __ j(not_equal, &cache_miss, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001848 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
Ben Murdoch086aeea2011-05-13 15:57:08 +01001849 __ jmp(&done);
1850
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001851 // The inlined call site cache did not match. Check for null and string
1852 // before calling the deferred code.
Ben Murdoch086aeea2011-05-13 15:57:08 +01001853 __ bind(&cache_miss);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001854 // Null is not an instance of anything.
Steve Block44f0eee2011-05-26 01:26:41 +01001855 __ cmp(object, factory()->null_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01001856 __ j(equal, &false_result);
1857
1858 // String values are not instances of anything.
1859 Condition is_string = masm_->IsObjectStringType(object, temp, temp);
1860 __ j(is_string, &false_result);
1861
1862 // Go to the deferred code.
1863 __ jmp(deferred->entry());
1864
1865 __ bind(&false_result);
Steve Block44f0eee2011-05-26 01:26:41 +01001866 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01001867
1868 // Here result has either true or false. Deferred code also produces true or
1869 // false object.
1870 __ bind(deferred->exit());
1871 __ bind(&done);
1872}
1873
1874
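// Deferred (cache-miss) path for InstanceOfKnownGlobal. Calls the instanceof
// stub with the inline-check and return-true-false-object flags; the distance
// back to the inlined map check is passed through the reserved temp
// register's safepoint slot so the stub can patch the inlined cache site.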
Ben Murdoch2b4ba112012-01-20 14:57:15 +00001875void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1876 Label* map_check) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01001877 PushSafepointRegistersScope scope(this);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001878
1879 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1880 flags = static_cast<InstanceofStub::Flags>(
1881 flags | InstanceofStub::kArgsInRegisters);
1882 flags = static_cast<InstanceofStub::Flags>(
1883 flags | InstanceofStub::kCallSiteInlineCheck);
1884 flags = static_cast<InstanceofStub::Flags>(
1885 flags | InstanceofStub::kReturnTrueFalseObject);
1886 InstanceofStub stub(flags);
1887
Ben Murdoch8b112d22011-06-08 16:22:53 +01001888 // Get the temp register reserved by the instruction. This needs to be a
1889 // register that is pushed last by PushSafepointRegisters, as the top of the
1890 // stack is used to pass the offset to the location of the map check to
1891 // the stub.
Ben Murdochb8e0da22011-05-16 14:20:40 +01001892 Register temp = ToRegister(instr->TempAt(0));
Ben Murdoch8b112d22011-06-08 16:22:53 +01001893 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
Ben Murdoch85b71792012-04-11 18:30:58 +01001894 __ mov(InstanceofStub::right(), Immediate(instr->function()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001895 static const int kAdditionalDelta = 13;
Ben Murdoch086aeea2011-05-13 15:57:08 +01001896 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
Ben Murdoch086aeea2011-05-13 15:57:08 +01001897 __ mov(temp, Immediate(delta));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001898 __ StoreToSafepointRegisterSlot(temp, temp);
Ben Murdoch8b112d22011-06-08 16:22:53 +01001899 CallCodeGeneric(stub.GetCode(),
1900 RelocInfo::CODE_TARGET,
1901 instr,
Ben Murdoch8b112d22011-06-08 16:22:53 +01001902 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Ben Murdoch2b4ba112012-01-20 14:57:15 +00001903 ASSERT(instr->HasDeoptimizationEnvironment());
1904 LEnvironment* env = instr->deoptimization_environment();
1905 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
1906
Ben Murdoch086aeea2011-05-13 15:57:08 +01001907 // Put the result value into the eax slot and restore all registers.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001908 __ StoreToSafepointRegisterSlot(eax, eax);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001909}
1910
1911
Ben Murdoch85b71792012-04-11 18:30:58 +01001912static Condition ComputeCompareCondition(Token::Value op) {
1913 switch (op) {
1914 case Token::EQ_STRICT:
1915 case Token::EQ:
1916 return equal;
1917 case Token::LT:
1918 return less;
1919 case Token::GT:
1920 return greater;
1921 case Token::LTE:
1922 return less_equal;
1923 case Token::GTE:
1924 return greater_equal;
1925 default:
1926 UNREACHABLE();
1927 return no_condition;
1928 }
1929}
1930
1931
Ben Murdochb0fe1622011-05-05 13:52:32 +01001932void LCodeGen::DoCmpT(LCmpT* instr) {
1933 Token::Value op = instr->op();
1934
1935 Handle<Code> ic = CompareIC::GetUninitialized(op);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001936 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001937
1938 Condition condition = ComputeCompareCondition(op);
Ben Murdoch85b71792012-04-11 18:30:58 +01001939 if (op == Token::GT || op == Token::LTE) {
1940 condition = ReverseCondition(condition);
1941 }
Ben Murdoch257744e2011-11-30 15:57:28 +00001942 Label true_value, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001943 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00001944 __ j(condition, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01001945 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00001946 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001947 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01001948 __ mov(ToRegister(instr->result()), factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001949 __ bind(&done);
1950}
1951
1952
Ben Murdochb0fe1622011-05-05 13:52:32 +01001953void LCodeGen::DoReturn(LReturn* instr) {
1954 if (FLAG_trace) {
Steve Block1e0659c2011-05-24 12:43:12 +01001955 // Preserve the return value on the stack and rely on the runtime call
1956 // to return the value in the same register. We're leaving the code
1957 // managed by the register allocator and tearing down the frame, it's
1958 // safe to write to the context register.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001959 __ push(eax);
Steve Block1e0659c2011-05-24 12:43:12 +01001960 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001961 __ CallRuntime(Runtime::kTraceExit, 1);
1962 }
1963 __ mov(esp, ebp);
1964 __ pop(ebp);
Ben Murdoch257744e2011-11-30 15:57:28 +00001965 __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001966}
1967
1968
Ben Murdoch8b112d22011-06-08 16:22:53 +01001969void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001970 Register result = ToRegister(instr->result());
1971 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
Ben Murdoch85b71792012-04-11 18:30:58 +01001972 if (instr->hydrogen()->check_hole_value()) {
Steve Block44f0eee2011-05-26 01:26:41 +01001973 __ cmp(result, factory()->the_hole_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001974 DeoptimizeIf(equal, instr->environment());
1975 }
1976}
1977
1978
Ben Murdoch8b112d22011-06-08 16:22:53 +01001979void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
1980 ASSERT(ToRegister(instr->context()).is(esi));
1981 ASSERT(ToRegister(instr->global_object()).is(eax));
1982 ASSERT(ToRegister(instr->result()).is(eax));
1983
1984 __ mov(ecx, instr->name());
1985 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
1986 RelocInfo::CODE_TARGET_CONTEXT;
1987 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001988 CallCode(ic, mode, instr);
Ben Murdoch8b112d22011-06-08 16:22:53 +01001989}
1990
1991
1992void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
Ben Murdoch85b71792012-04-11 18:30:58 +01001993 Register value = ToRegister(instr->InputAt(0));
1994 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
Steve Block1e0659c2011-05-24 12:43:12 +01001995
1996 // If the cell we are storing to contains the hole it could have
1997 // been deleted from the property dictionary. In that case, we need
1998 // to update the property details in the property dictionary to mark
1999 // it as no longer deleted. We deoptimize in that case.
Ben Murdoch85b71792012-04-11 18:30:58 +01002000 if (instr->hydrogen()->check_hole_value()) {
2001 __ cmp(cell_operand, factory()->the_hole_value());
Steve Block1e0659c2011-05-24 12:43:12 +01002002 DeoptimizeIf(equal, instr->environment());
2003 }
2004
2005 // Store the value.
Ben Murdoch85b71792012-04-11 18:30:58 +01002006 __ mov(cell_operand, value);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002007}
2008
2009
Ben Murdoch8b112d22011-06-08 16:22:53 +01002010void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2011 ASSERT(ToRegister(instr->context()).is(esi));
2012 ASSERT(ToRegister(instr->global_object()).is(edx));
2013 ASSERT(ToRegister(instr->value()).is(eax));
2014
2015 __ mov(ecx, instr->name());
Ben Murdoch85b71792012-04-11 18:30:58 +01002016 Handle<Code> ic = instr->strict_mode()
Ben Murdoch8b112d22011-06-08 16:22:53 +01002017 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2018 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002019 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002020}
2021
2022
Ben Murdochb8e0da22011-05-16 14:20:40 +01002023void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002024 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002025 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002026 __ mov(result, ContextOperand(context, instr->slot_index()));
2027}
2028
2029
2030void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2031 Register context = ToRegister(instr->context());
2032 Register value = ToRegister(instr->value());
Ben Murdoch85b71792012-04-11 18:30:58 +01002033 __ mov(ContextOperand(context, instr->slot_index()), value);
2034 if (instr->needs_write_barrier()) {
Steve Block1e0659c2011-05-24 12:43:12 +01002035 Register temp = ToRegister(instr->TempAt(0));
2036 int offset = Context::SlotOffset(instr->slot_index());
Ben Murdoch85b71792012-04-11 18:30:58 +01002037 __ RecordWrite(context, offset, value, temp);
Steve Block1e0659c2011-05-24 12:43:12 +01002038 }
Ben Murdochb8e0da22011-05-16 14:20:40 +01002039}
2040
2041
Ben Murdochb0fe1622011-05-05 13:52:32 +01002042void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01002043 Register object = ToRegister(instr->object());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002044 Register result = ToRegister(instr->result());
2045 if (instr->hydrogen()->is_in_object()) {
2046 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
2047 } else {
2048 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2049 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
2050 }
2051}
2052
2053
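// Loads a monomorphic named property: a negative field index means the value
// lives in the object itself, a non-negative index means it lives in the
// properties backing store, and a constant-function lookup result is loaded
// directly as a heap object.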
Ben Murdoch257744e2011-11-30 15:57:28 +00002054void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
2055 Register object,
2056 Handle<Map> type,
2057 Handle<String> name) {
Ben Murdoch85b71792012-04-11 18:30:58 +01002058 LookupResult lookup;
Steve Block44f0eee2011-05-26 01:26:41 +01002059 type->LookupInDescriptors(NULL, *name, &lookup);
Ben Murdoch85b71792012-04-11 18:30:58 +01002060 ASSERT(lookup.IsProperty() &&
Ben Murdoch257744e2011-11-30 15:57:28 +00002061 (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
2062 if (lookup.type() == FIELD) {
2063 int index = lookup.GetLocalFieldIndexFromMap(*type);
2064 int offset = index * kPointerSize;
2065 if (index < 0) {
2066 // Negative property indices are in-object properties, indexed
2067 // from the end of the fixed part of the object.
2068 __ mov(result, FieldOperand(object, offset + type->instance_size()));
2069 } else {
2070 // Non-negative property indices are in the properties array.
2071 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2072 __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
2073 }
Steve Block44f0eee2011-05-26 01:26:41 +01002074 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002075 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
Ben Murdoch85b71792012-04-11 18:30:58 +01002076 LoadHeapObject(result, Handle<HeapObject>::cast(function));
Steve Block44f0eee2011-05-26 01:26:41 +01002077 }
2078}
2079
2080
2081void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2082 Register object = ToRegister(instr->object());
2083 Register result = ToRegister(instr->result());
2084
2085 int map_count = instr->hydrogen()->types()->length();
2086 Handle<String> name = instr->hydrogen()->name();
2087 if (map_count == 0) {
2088 ASSERT(instr->hydrogen()->need_generic());
2089 __ mov(ecx, name);
2090 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002091 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Steve Block44f0eee2011-05-26 01:26:41 +01002092 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00002093 Label done;
Steve Block44f0eee2011-05-26 01:26:41 +01002094 for (int i = 0; i < map_count - 1; ++i) {
2095 Handle<Map> map = instr->hydrogen()->types()->at(i);
Ben Murdoch257744e2011-11-30 15:57:28 +00002096 Label next;
Steve Block44f0eee2011-05-26 01:26:41 +01002097 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
Ben Murdoch257744e2011-11-30 15:57:28 +00002098 __ j(not_equal, &next, Label::kNear);
2099 EmitLoadFieldOrConstantFunction(result, object, map, name);
2100 __ jmp(&done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002101 __ bind(&next);
2102 }
2103 Handle<Map> map = instr->hydrogen()->types()->last();
2104 __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2105 if (instr->hydrogen()->need_generic()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002106 Label generic;
2107 __ j(not_equal, &generic, Label::kNear);
2108 EmitLoadFieldOrConstantFunction(result, object, map, name);
2109 __ jmp(&done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002110 __ bind(&generic);
2111 __ mov(ecx, name);
2112 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002113 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Steve Block44f0eee2011-05-26 01:26:41 +01002114 } else {
2115 DeoptimizeIf(not_equal, instr->environment());
Ben Murdoch257744e2011-11-30 15:57:28 +00002116 EmitLoadFieldOrConstantFunction(result, object, map, name);
Steve Block44f0eee2011-05-26 01:26:41 +01002117 }
2118 __ bind(&done);
2119 }
2120}
2121
2122
Ben Murdochb0fe1622011-05-05 13:52:32 +01002123void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002124 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002125 ASSERT(ToRegister(instr->object()).is(eax));
2126 ASSERT(ToRegister(instr->result()).is(eax));
2127
2128 __ mov(ecx, instr->name());
Steve Block44f0eee2011-05-26 01:26:41 +01002129 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002130 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002131}
2132
2133
Steve Block9fac8402011-05-12 15:51:54 +01002134void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2135 Register function = ToRegister(instr->function());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002136 Register temp = ToRegister(instr->TempAt(0));
Steve Block9fac8402011-05-12 15:51:54 +01002137 Register result = ToRegister(instr->result());
2138
2139 // Check that the function really is a function.
2140 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
2141 DeoptimizeIf(not_equal, instr->environment());
2142
2143 // Check whether the function has an instance prototype.
Ben Murdoch257744e2011-11-30 15:57:28 +00002144 Label non_instance;
Steve Block9fac8402011-05-12 15:51:54 +01002145 __ test_b(FieldOperand(result, Map::kBitFieldOffset),
2146 1 << Map::kHasNonInstancePrototype);
Ben Murdoch257744e2011-11-30 15:57:28 +00002147 __ j(not_zero, &non_instance, Label::kNear);
Steve Block9fac8402011-05-12 15:51:54 +01002148
2149 // Get the prototype or initial map from the function.
2150 __ mov(result,
2151 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2152
2153 // Check that the function has a prototype or an initial map.
Steve Block44f0eee2011-05-26 01:26:41 +01002154 __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
Steve Block9fac8402011-05-12 15:51:54 +01002155 DeoptimizeIf(equal, instr->environment());
2156
2157 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00002158 Label done;
Steve Block9fac8402011-05-12 15:51:54 +01002159 __ CmpObjectType(result, MAP_TYPE, temp);
Ben Murdoch257744e2011-11-30 15:57:28 +00002160 __ j(not_equal, &done, Label::kNear);
Steve Block9fac8402011-05-12 15:51:54 +01002161
2162 // Get the prototype from the initial map.
2163 __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002164 __ jmp(&done, Label::kNear);
Steve Block9fac8402011-05-12 15:51:54 +01002165
2166 // Non-instance prototype: Fetch prototype from constructor field
2167 // in the function's map.
2168 __ bind(&non_instance);
2169 __ mov(result, FieldOperand(result, Map::kConstructorOffset));
2170
2171 // All done.
2172 __ bind(&done);
2173}
2174
2175
Ben Murdochb0fe1622011-05-05 13:52:32 +01002176void LCodeGen::DoLoadElements(LLoadElements* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002177 Register result = ToRegister(instr->result());
2178 Register input = ToRegister(instr->InputAt(0));
2179 __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002180 if (FLAG_debug_code) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002181 Label done, ok, fail;
Steve Block1e0659c2011-05-24 12:43:12 +01002182 __ cmp(FieldOperand(result, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01002183 Immediate(factory()->fixed_array_map()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002184 __ j(equal, &done, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002185 __ cmp(FieldOperand(result, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01002186 Immediate(factory()->fixed_cow_array_map()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002187 __ j(equal, &done, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002188 Register temp((result.is(eax)) ? ebx : eax);
2189 __ push(temp);
2190 __ mov(temp, FieldOperand(result, HeapObject::kMapOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002191 __ movzx_b(temp, FieldOperand(temp, Map::kBitField2Offset));
2192 __ and_(temp, Map::kElementsKindMask);
2193 __ shr(temp, Map::kElementsKindShift);
Ben Murdoch589d6972011-11-30 16:04:58 +00002194 __ cmp(temp, FAST_ELEMENTS);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002195 __ j(equal, &ok, Label::kNear);
Ben Murdoch589d6972011-11-30 16:04:58 +00002196 __ cmp(temp, FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002197 __ j(less, &fail, Label::kNear);
Ben Murdoch589d6972011-11-30 16:04:58 +00002198 __ cmp(temp, LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002199 __ j(less_equal, &ok, Label::kNear);
2200 __ bind(&fail);
2201 __ Abort("Check for fast or external elements failed.");
2202 __ bind(&ok);
Steve Block44f0eee2011-05-26 01:26:41 +01002203 __ pop(temp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002204 __ bind(&done);
2205 }
2206}
2207
2208
Steve Block44f0eee2011-05-26 01:26:41 +01002209void LCodeGen::DoLoadExternalArrayPointer(
2210 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002211 Register result = ToRegister(instr->result());
2212 Register input = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002213 __ mov(result, FieldOperand(input,
2214 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002215}
2216
2217
Ben Murdochb0fe1622011-05-05 13:52:32 +01002218void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2219 Register arguments = ToRegister(instr->arguments());
2220 Register length = ToRegister(instr->length());
2221 Operand index = ToOperand(instr->index());
2222 Register result = ToRegister(instr->result());
2223
2224 __ sub(length, index);
2225 DeoptimizeIf(below_equal, instr->environment());
2226
Ben Murdoch086aeea2011-05-13 15:57:08 +01002227 // There are two words between the frame pointer and the last argument.
2228 // Subtracting from length accounts for one of them; add one more.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002229 __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2230}
2231
2232
2233void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002234 Register result = ToRegister(instr->result());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002235
2236 // Load the result.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002237 __ mov(result,
2238 BuildFastArrayOperand(instr->elements(), instr->key(),
Ben Murdoch589d6972011-11-30 16:04:58 +00002239 FAST_ELEMENTS,
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002240 FixedArray::kHeaderSize - kHeapObjectTag));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002241
Ben Murdochb8e0da22011-05-16 14:20:40 +01002242 // Check for the hole value.
Ben Murdoch257744e2011-11-30 15:57:28 +00002243 if (instr->hydrogen()->RequiresHoleCheck()) {
2244 __ cmp(result, factory()->the_hole_value());
2245 DeoptimizeIf(equal, instr->environment());
2246 }
2247}
2248
2249
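// Holes in fast double arrays are encoded as a canonical NaN, so the hole
// check only needs to compare the upper 32 bits of the element (at the offset
// just past the lower word) against kHoleNanUpper32 before loading the
// double.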
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002250void LCodeGen::DoLoadKeyedFastDoubleElement(
2251 LLoadKeyedFastDoubleElement* instr) {
2252 XMMRegister result = ToDoubleRegister(instr->result());
2253
Ben Murdoch85b71792012-04-11 18:30:58 +01002254 if (instr->hydrogen()->RequiresHoleCheck()) {
2255 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
2256 sizeof(kHoleNanLower32);
2257 Operand hole_check_operand = BuildFastArrayOperand(
2258 instr->elements(), instr->key(),
2259 FAST_DOUBLE_ELEMENTS,
2260 offset);
2261 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
2262 DeoptimizeIf(equal, instr->environment());
2263 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002264
2265 Operand double_load_operand = BuildFastArrayOperand(
Ben Murdoch589d6972011-11-30 16:04:58 +00002266 instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002267 FixedDoubleArray::kHeaderSize - kHeapObjectTag);
2268 __ movdbl(result, double_load_operand);
2269}
2270
2271
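// Builds the memory operand for an element access. A constant key is folded
// into the displacement, scaled by the elements-kind shift size; a register
// key uses the corresponding ScaleFactor addressing mode with the same
// offset.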
2272Operand LCodeGen::BuildFastArrayOperand(
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002273 LOperand* elements_pointer,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002274 LOperand* key,
Ben Murdoch589d6972011-11-30 16:04:58 +00002275 ElementsKind elements_kind,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002276 uint32_t offset) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002277 Register elements_pointer_reg = ToRegister(elements_pointer);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002278 int shift_size = ElementsKindToShiftSize(elements_kind);
Ben Murdoch257744e2011-11-30 15:57:28 +00002279 if (key->IsConstantOperand()) {
2280 int constant_value = ToInteger32(LConstantOperand::cast(key));
2281 if (constant_value & 0xF0000000) {
2282 Abort("array index constant value too big");
2283 }
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002284 return Operand(elements_pointer_reg,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002285 constant_value * (1 << shift_size) + offset);
Ben Murdoch257744e2011-11-30 15:57:28 +00002286 } else {
2287 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002288 return Operand(elements_pointer_reg, ToRegister(key), scale_factor, offset);
Ben Murdoch257744e2011-11-30 15:57:28 +00002289 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002290}
2291
2292
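// Loads from external (typed) arrays: float32 elements are widened to double,
// narrow integer kinds are sign- or zero-extended into a 32-bit register, and
// uint32 values that do not fit in a signed 32-bit result deoptimize (see the
// TODO below about returning a double instead).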
Steve Block44f0eee2011-05-26 01:26:41 +01002293void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2294 LLoadKeyedSpecializedArrayElement* instr) {
Ben Murdoch589d6972011-11-30 16:04:58 +00002295 ElementsKind elements_kind = instr->elements_kind();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002296 Operand operand(BuildFastArrayOperand(instr->external_pointer(),
2297 instr->key(), elements_kind, 0));
Ben Murdoch589d6972011-11-30 16:04:58 +00002298 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
Steve Block44f0eee2011-05-26 01:26:41 +01002299 XMMRegister result(ToDoubleRegister(instr->result()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002300 __ movss(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002301 __ cvtss2sd(result, result);
Ben Murdoch589d6972011-11-30 16:04:58 +00002302 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002303 __ movdbl(ToDoubleRegister(instr->result()), operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002304 } else {
2305 Register result(ToRegister(instr->result()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002306 switch (elements_kind) {
Ben Murdoch589d6972011-11-30 16:04:58 +00002307 case EXTERNAL_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002308 __ movsx_b(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002309 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002310 case EXTERNAL_PIXEL_ELEMENTS:
2311 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002312 __ movzx_b(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002313 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002314 case EXTERNAL_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002315 __ movsx_w(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002316 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002317 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002318 __ movzx_w(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002319 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002320 case EXTERNAL_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002321 __ mov(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01002322 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002323 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00002324 __ mov(result, operand);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002325 __ test(result, Operand(result));
Steve Block44f0eee2011-05-26 01:26:41 +01002326 // TODO(danno): we could be more clever here, perhaps having a special
2327 // version of the stub that detects if the overflow case actually
2328 // happens, and generates code that returns a double rather than int.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002329 DeoptimizeIf(negative, instr->environment());
Steve Block44f0eee2011-05-26 01:26:41 +01002330 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00002331 case EXTERNAL_FLOAT_ELEMENTS:
2332 case EXTERNAL_DOUBLE_ELEMENTS:
2333 case FAST_ELEMENTS:
2334 case FAST_DOUBLE_ELEMENTS:
2335 case DICTIONARY_ELEMENTS:
2336 case NON_STRICT_ARGUMENTS_ELEMENTS:
Steve Block44f0eee2011-05-26 01:26:41 +01002337 UNREACHABLE();
2338 break;
2339 }
2340 }
Steve Block1e0659c2011-05-24 12:43:12 +01002341}
2342
2343
Ben Murdochb0fe1622011-05-05 13:52:32 +01002344void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002345 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002346 ASSERT(ToRegister(instr->object()).is(edx));
2347 ASSERT(ToRegister(instr->key()).is(eax));
2348
Steve Block44f0eee2011-05-26 01:26:41 +01002349 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002350 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002351}
2352
2353
2354void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2355 Register result = ToRegister(instr->result());
2356
2357 // Check for arguments adaptor frame.
Ben Murdoch257744e2011-11-30 15:57:28 +00002358 Label done, adapted;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002359 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2360 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
2361 __ cmp(Operand(result),
2362 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Ben Murdoch257744e2011-11-30 15:57:28 +00002363 __ j(equal, &adapted, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002364
2365 // No arguments adaptor frame.
2366 __ mov(result, Operand(ebp));
Ben Murdoch257744e2011-11-30 15:57:28 +00002367 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002368
2369 // Arguments adaptor frame present.
2370 __ bind(&adapted);
2371 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2372
Ben Murdoch086aeea2011-05-13 15:57:08 +01002373 // Result is the frame pointer for the frame if not adapted and for the real
2374 // frame below the adaptor frame if adapted.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002375 __ bind(&done);
2376}
2377
2378
2379void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002380 Operand elem = ToOperand(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002381 Register result = ToRegister(instr->result());
2382
Ben Murdoch257744e2011-11-30 15:57:28 +00002383 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002384
Ben Murdoch086aeea2011-05-13 15:57:08 +01002385 // If there is no arguments adaptor frame, the number of arguments is fixed.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002386 __ cmp(ebp, elem);
2387 __ mov(result, Immediate(scope()->num_parameters()));
Ben Murdoch257744e2011-11-30 15:57:28 +00002388 __ j(equal, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002389
2390 // Arguments adaptor frame present. Get argument length from there.
2391 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2392 __ mov(result, Operand(result,
2393 ArgumentsAdaptorFrameConstants::kLengthOffset));
2394 __ SmiUntag(result);
2395
Ben Murdoch086aeea2011-05-13 15:57:08 +01002396 // Argument length is in result register.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002397 __ bind(&done);
2398}
2399
2400
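// Implements Function.prototype.apply-style calls: normalizes the receiver
// (null/undefined become the global receiver unless the callee is strict
// mode or a builtin), bounds the argument count, copies the arguments onto
// the stack and invokes the function with a lazy-deopt safepoint.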
Ben Murdoch85b71792012-04-11 18:30:58 +01002401void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002402 Register receiver = ToRegister(instr->receiver());
Steve Block1e0659c2011-05-24 12:43:12 +01002403 Register function = ToRegister(instr->function());
Ben Murdoch85b71792012-04-11 18:30:58 +01002404 Register length = ToRegister(instr->length());
2405 Register elements = ToRegister(instr->elements());
Steve Block1e0659c2011-05-24 12:43:12 +01002406 Register scratch = ToRegister(instr->TempAt(0));
Ben Murdoch85b71792012-04-11 18:30:58 +01002407 ASSERT(receiver.is(eax)); // Used for parameter count.
2408 ASSERT(function.is(edi)); // Required by InvokeFunction.
2409 ASSERT(ToRegister(instr->result()).is(eax));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002410
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002411 // If the receiver is null or undefined, we have to pass the global
2412 // object as a receiver to normal functions. Values have to be
2413 // passed unchanged to builtins and strict-mode functions.
Ben Murdoch257744e2011-11-30 15:57:28 +00002414 Label global_object, receiver_ok;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002415
2416 // Do not transform the receiver to object for strict mode
2417 // functions.
2418 __ mov(scratch,
2419 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2420 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
2421 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2422 __ j(not_equal, &receiver_ok, Label::kNear);
2423
2424 // Do not transform the receiver to object for builtins.
2425 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
2426 1 << SharedFunctionInfo::kNativeBitWithinByte);
2427 __ j(not_equal, &receiver_ok, Label::kNear);
2428
2429 // Normal function. Replace undefined or null with global receiver.
Steve Block44f0eee2011-05-26 01:26:41 +01002430 __ cmp(receiver, factory()->null_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002431 __ j(equal, &global_object, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002432 __ cmp(receiver, factory()->undefined_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002433 __ j(equal, &global_object, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002434
2435 // The receiver should be a JS object.
2436 __ test(receiver, Immediate(kSmiTagMask));
2437 DeoptimizeIf(equal, instr->environment());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002438 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01002439 DeoptimizeIf(below, instr->environment());
Ben Murdoch257744e2011-11-30 15:57:28 +00002440 __ jmp(&receiver_ok, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01002441
2442 __ bind(&global_object);
2443 // TODO(kmillikin): We have a hydrogen value for the global object. See
2444 // if it's better to use it than to explicitly fetch it from the context
2445 // here.
2446 __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
2447 __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
Ben Murdoch257744e2011-11-30 15:57:28 +00002448 __ mov(receiver,
2449 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002450 __ bind(&receiver_ok);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002451
2452 // Copy the arguments to this function possibly from the
2453 // adaptor frame below it.
2454 const uint32_t kArgumentsLimit = 1 * KB;
2455 __ cmp(length, kArgumentsLimit);
2456 DeoptimizeIf(above, instr->environment());
2457
2458 __ push(receiver);
2459 __ mov(receiver, length);
2460
2461 // Loop through the arguments pushing them onto the execution
2462 // stack.
Ben Murdoch257744e2011-11-30 15:57:28 +00002463 Label invoke, loop;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002464  // length is a small non-negative integer, due to the limit check above.
2465 __ test(length, Operand(length));
Ben Murdoch257744e2011-11-30 15:57:28 +00002466 __ j(zero, &invoke, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002467 __ bind(&loop);
2468 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2469 __ dec(length);
2470 __ j(not_zero, &loop);
2471
2472 // Invoke the function.
2473 __ bind(&invoke);
Steve Block1e0659c2011-05-24 12:43:12 +01002474 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2475 LPointerMap* pointers = instr->pointer_map();
Steve Block1e0659c2011-05-24 12:43:12 +01002476 RecordPosition(pointers->position());
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002477 SafepointGenerator safepoint_generator(
2478 this, pointers, Safepoint::kLazyDeopt);
Ben Murdoch257744e2011-11-30 15:57:28 +00002479 ParameterCount actual(eax);
2480 __ InvokeFunction(function, actual, CALL_FUNCTION,
2481 safepoint_generator, CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002482}
2483
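// Note: the sequence above backs apply-style invocations (roughly
// f.apply(receiver, arguments) with the arguments taken from the adaptor
// frame). Each element is pushed onto the stack and the callee is invoked
// with eax holding the actual argument count; calls with more than
// kArgumentsLimit (1024) arguments deoptimize rather than being expanded
// inline.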
2484
2485void LCodeGen::DoPushArgument(LPushArgument* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002486 LOperand* argument = instr->InputAt(0);
Ben Murdoch85b71792012-04-11 18:30:58 +01002487 if (argument->IsConstantOperand()) {
2488 __ push(ToImmediate(argument));
2489 } else {
2490 __ push(ToOperand(argument));
2491 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002492}
2493
2494
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002495void LCodeGen::DoThisFunction(LThisFunction* instr) {
2496 Register result = ToRegister(instr->result());
Ben Murdoch85b71792012-04-11 18:30:58 +01002497 __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002498}
2499
2500
Steve Block1e0659c2011-05-24 12:43:12 +01002501void LCodeGen::DoContext(LContext* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002502 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002503 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
2504}
2505
2506
2507void LCodeGen::DoOuterContext(LOuterContext* instr) {
2508 Register context = ToRegister(instr->context());
2509 Register result = ToRegister(instr->result());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002510 __ mov(result,
2511 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block1e0659c2011-05-24 12:43:12 +01002512}
2513
2514
2515void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2516 Register context = ToRegister(instr->context());
2517 Register result = ToRegister(instr->result());
2518 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002519}
2520
2521
2522void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002523 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002524 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002525 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002526}
2527
2528
2529void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2530 int arity,
Ben Murdoch257744e2011-11-30 15:57:28 +00002531 LInstruction* instr,
2532 CallKind call_kind) {
Ben Murdoch85b71792012-04-11 18:30:58 +01002533 // Change context if needed.
2534 bool change_context =
2535 (info()->closure()->context() != function->context()) ||
2536 scope()->contains_with() ||
2537 (scope()->num_heap_slots() > 0);
2538 if (change_context) {
2539 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2540 } else {
2541 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2542 }
2543
                                           2544  // Set eax to arguments count if adaptation is not needed. Assumes that eax
2545 // is available to write to at this point.
2546 if (!function->NeedsArgumentsAdaption()) {
2547 __ mov(eax, arity);
2548 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002549
2550 LPointerMap* pointers = instr->pointer_map();
2551 RecordPosition(pointers->position());
2552
Ben Murdoch85b71792012-04-11 18:30:58 +01002553 // Invoke function.
2554 __ SetCallKind(ecx, call_kind);
2555 if (*function == *info()->closure()) {
2556 __ CallSelf();
Ben Murdochc7cc0282012-03-05 14:35:55 +00002557 } else {
Ben Murdoch85b71792012-04-11 18:30:58 +01002558 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
Ben Murdochc7cc0282012-03-05 14:35:55 +00002559 }
Ben Murdoch85b71792012-04-11 18:30:58 +01002560
2561 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002562}
2563
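// CallKnownFunction above is the direct-call fast path for a callee that is
// known at compile time: no IC is involved, the callee's context is loaded
// only when it can differ from the caller's, and eax is preloaded with the
// arity when no arguments adaptation is needed. When the callee is the
// function currently being compiled, __ CallSelf() emits a recursive call.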
2564
2565void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2566 ASSERT(ToRegister(instr->result()).is(eax));
Ben Murdoch85b71792012-04-11 18:30:58 +01002567 __ mov(edi, instr->function());
Ben Murdoch257744e2011-11-30 15:57:28 +00002568 CallKnownFunction(instr->function(),
2569 instr->arity(),
2570 instr,
2571 CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002572}
2573
2574
2575void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002576 Register input_reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002577 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01002578 factory()->heap_number_map());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002579 DeoptimizeIf(not_equal, instr->environment());
2580
2581 Label done;
2582 Register tmp = input_reg.is(eax) ? ecx : eax;
2583 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2584
2585 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002586 PushSafepointRegistersScope scope(this);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002587
2588 Label negative;
2589 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002590 // Check the sign of the argument. If the argument is positive, just
2591 // return it. We do not need to patch the stack since |input| and
2592 // |result| are the same register and |input| will be restored
2593 // unchanged by popping safepoint registers.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002594 __ test(tmp, Immediate(HeapNumber::kSignMask));
2595 __ j(not_zero, &negative);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002596 __ jmp(&done);
2597
2598 __ bind(&negative);
2599
2600 Label allocated, slow;
2601 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2602 __ jmp(&allocated);
2603
2604 // Slow case: Call the runtime system to do the number allocation.
2605 __ bind(&slow);
2606
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002607 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
2608 instr, instr->context());
Ben Murdoch8b112d22011-06-08 16:22:53 +01002609
Ben Murdochb0fe1622011-05-05 13:52:32 +01002610 // Set the pointer to the new heap number in tmp.
2611 if (!tmp.is(eax)) __ mov(tmp, eax);
2612
2613 // Restore input_reg after call to runtime.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002614 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002615
2616 __ bind(&allocated);
2617 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2618 __ and_(tmp2, ~HeapNumber::kSignMask);
2619 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2620 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2621 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002622 __ StoreToSafepointRegisterSlot(input_reg, tmp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002623
Steve Block1e0659c2011-05-24 12:43:12 +01002624 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002625}
2626
2627
Steve Block1e0659c2011-05-24 12:43:12 +01002628void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002629 Register input_reg = ToRegister(instr->value());
Steve Block1e0659c2011-05-24 12:43:12 +01002630 __ test(input_reg, Operand(input_reg));
2631 Label is_positive;
2632 __ j(not_sign, &is_positive);
2633 __ neg(input_reg);
2634 __ test(input_reg, Operand(input_reg));
2635 DeoptimizeIf(negative, instr->environment());
2636 __ bind(&is_positive);
2637}
2638
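// EmitIntegerMathAbs above negates negative inputs and deoptimizes if the
// result is still negative, which can only happen for kMinInt: for example
// abs(-2147483648) does not fit in a signed 32-bit register, so that input
// falls back to the generic (deoptimized) code instead.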
2639
Ben Murdochb0fe1622011-05-05 13:52:32 +01002640void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2641 // Class for deferred case.
2642 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2643 public:
2644 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2645 LUnaryMathOperation* instr)
2646 : LDeferredCode(codegen), instr_(instr) { }
2647 virtual void Generate() {
2648 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2649 }
2650 private:
2651 LUnaryMathOperation* instr_;
2652 };
2653
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002654 ASSERT(instr->value()->Equals(instr->result()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002655 Representation r = instr->hydrogen()->value()->representation();
2656
2657 if (r.IsDouble()) {
2658 XMMRegister scratch = xmm0;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002659 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002660 __ xorps(scratch, scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002661 __ subsd(scratch, input_reg);
2662 __ pand(input_reg, scratch);
2663 } else if (r.IsInteger32()) {
Steve Block1e0659c2011-05-24 12:43:12 +01002664 EmitIntegerMathAbs(instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002665 } else { // Tagged case.
2666 DeferredMathAbsTaggedHeapNumber* deferred =
2667 new DeferredMathAbsTaggedHeapNumber(this, instr);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002668 Register input_reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002669 // Smi check.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002670 __ JumpIfNotSmi(input_reg, deferred->entry());
Steve Block1e0659c2011-05-24 12:43:12 +01002671 EmitIntegerMathAbs(instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002672 __ bind(deferred->exit());
2673 }
2674}
2675
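// For doubles, the sequence above computes scratch = 0.0 - input and then
// ANDs the two values: for non-NaN inputs x and -x differ only in the sign
// bit, so the bitwise AND clears that bit and yields |x| without a branch.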
2676
2677void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2678 XMMRegister xmm_scratch = xmm0;
2679 Register output_reg = ToRegister(instr->result());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002680 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002681
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002682 if (CpuFeatures::IsSupported(SSE4_1)) {
2683 CpuFeatures::Scope scope(SSE4_1);
2684 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2685 // Deoptimize on negative zero.
2686 Label non_zero;
2687 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2688 __ ucomisd(input_reg, xmm_scratch);
2689 __ j(not_equal, &non_zero, Label::kNear);
2690 __ movmskpd(output_reg, input_reg);
2691 __ test(output_reg, Immediate(1));
2692 DeoptimizeIf(not_zero, instr->environment());
2693 __ bind(&non_zero);
2694 }
2695 __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
2696 __ cvttsd2si(output_reg, Operand(xmm_scratch));
2697 // Overflow is signalled with minint.
2698 __ cmp(output_reg, 0x80000000u);
2699 DeoptimizeIf(equal, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002700 } else {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002701 Label done;
2702 // Deoptimize on negative numbers.
2703 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
2704 __ ucomisd(input_reg, xmm_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002705 DeoptimizeIf(below, instr->environment());
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002706
2707 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2708 // Check for negative zero.
2709 Label positive_sign;
2710 __ j(above, &positive_sign, Label::kNear);
2711 __ movmskpd(output_reg, input_reg);
2712 __ test(output_reg, Immediate(1));
2713 DeoptimizeIf(not_zero, instr->environment());
2714 __ Set(output_reg, Immediate(0));
2715 __ jmp(&done, Label::kNear);
2716 __ bind(&positive_sign);
2717 }
2718
2719 // Use truncating instruction (OK because input is positive).
2720 __ cvttsd2si(output_reg, Operand(input_reg));
2721
2722 // Overflow is signalled with minint.
2723 __ cmp(output_reg, 0x80000000u);
2724 DeoptimizeIf(equal, instr->environment());
2725 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002726 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002727}
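
// Math.floor has two paths above. With SSE4.1, roundsd rounds toward minus
// infinity directly; without it, only non-negative inputs are handled inline
// and plain truncation (cvttsd2si) doubles as floor. In both paths
// cvttsd2si reports an out-of-range result as 0x80000000, which is why that
// value triggers a deoptimization.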
2728
Ben Murdochb0fe1622011-05-05 13:52:32 +01002729void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2730 XMMRegister xmm_scratch = xmm0;
2731 Register output_reg = ToRegister(instr->result());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002732 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002733
Ben Murdoch257744e2011-11-30 15:57:28 +00002734 Label below_half, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002735 // xmm_scratch = 0.5
2736 ExternalReference one_half = ExternalReference::address_of_one_half();
2737 __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
Ben Murdoch257744e2011-11-30 15:57:28 +00002738 __ ucomisd(xmm_scratch, input_reg);
2739 __ j(above, &below_half);
Ben Murdoch692be652012-01-10 18:47:50 +00002740 // xmm_scratch = input + 0.5
2741 __ addsd(xmm_scratch, input_reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002742
Ben Murdochb0fe1622011-05-05 13:52:32 +01002743 // Compute Math.floor(value + 0.5).
2744 // Use truncating instruction (OK because input is positive).
Ben Murdoch692be652012-01-10 18:47:50 +00002745 __ cvttsd2si(output_reg, Operand(xmm_scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002746
2747 // Overflow is signalled with minint.
2748 __ cmp(output_reg, 0x80000000u);
2749 DeoptimizeIf(equal, instr->environment());
Ben Murdoch257744e2011-11-30 15:57:28 +00002750 __ jmp(&done);
2751
2752 __ bind(&below_half);
2753
2754 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
2755 // we can ignore the difference between a result of -0 and +0.
2756 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2757 // If the sign is positive, we return +0.
2758 __ movmskpd(output_reg, input_reg);
2759 __ test(output_reg, Immediate(1));
2760 DeoptimizeIf(not_zero, instr->environment());
2761 } else {
2762 // If the input is >= -0.5, we return +0.
2763 __ mov(output_reg, Immediate(0xBF000000));
2764 __ movd(xmm_scratch, Operand(output_reg));
2765 __ cvtss2sd(xmm_scratch, xmm_scratch);
2766 __ ucomisd(input_reg, xmm_scratch);
2767 DeoptimizeIf(below, instr->environment());
2768 }
2769 __ Set(output_reg, Immediate(0));
2770 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002771}
2772
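// The inline rounding above only covers the common cases: inputs >= 0.5 are
// rounded as trunc(x + 0.5) (e.g. Math.round(2.5) -> 3), inputs in roughly
// [-0.5, 0.5) produce +0 (deoptimizing when a -0 result would have to be
// preserved), and anything smaller than -0.5 deoptimizes to the generic code.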
2773
2774void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002775 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002776 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2777 __ sqrtsd(input_reg, input_reg);
2778}
2779
2780
Ben Murdoch85b71792012-04-11 18:30:58 +01002781void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002782 XMMRegister xmm_scratch = xmm0;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002783 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002784 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
Ben Murdoch257744e2011-11-30 15:57:28 +00002785 __ xorps(xmm_scratch, xmm_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01002786 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002787 __ sqrtsd(input_reg, input_reg);
2788}
2789
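// x ** 0.5 is computed above as sqrt(x + 0.0). The addition is not a no-op:
// it turns a -0 input into +0 first, so the result agrees with
// Math.pow(-0, 0.5) == +0 rather than with sqrtsd's -0.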
2790
2791void LCodeGen::DoPower(LPower* instr) {
Ben Murdoch85b71792012-04-11 18:30:58 +01002792 LOperand* left = instr->InputAt(0);
2793 LOperand* right = instr->InputAt(1);
2794 DoubleRegister result_reg = ToDoubleRegister(instr->result());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002795 Representation exponent_type = instr->hydrogen()->right()->representation();
Steve Block44f0eee2011-05-26 01:26:41 +01002796
Ben Murdoch85b71792012-04-11 18:30:58 +01002797 if (exponent_type.IsDouble()) {
2798 // It is safe to use ebx directly since the instruction is marked
2799 // as a call.
2800 __ PrepareCallCFunction(4, ebx);
2801 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2802 __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
2803 __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
2804 4);
Ben Murdochc7cc0282012-03-05 14:35:55 +00002805 } else if (exponent_type.IsInteger32()) {
Ben Murdoch85b71792012-04-11 18:30:58 +01002806 // It is safe to use ebx directly since the instruction is marked
2807 // as a call.
2808 ASSERT(!ToRegister(right).is(ebx));
2809 __ PrepareCallCFunction(4, ebx);
2810 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2811 __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
2812 __ CallCFunction(ExternalReference::power_double_int_function(isolate()),
2813 4);
Ben Murdochc7cc0282012-03-05 14:35:55 +00002814 } else {
Ben Murdoch85b71792012-04-11 18:30:58 +01002815 ASSERT(exponent_type.IsTagged());
2816 CpuFeatures::Scope scope(SSE2);
2817 Register right_reg = ToRegister(right);
2818
2819 Label non_smi, call;
2820 __ JumpIfNotSmi(right_reg, &non_smi);
2821 __ SmiUntag(right_reg);
2822 __ cvtsi2sd(result_reg, Operand(right_reg));
2823 __ jmp(&call);
2824
2825 __ bind(&non_smi);
2826 // It is safe to use ebx directly since the instruction is marked
2827 // as a call.
2828 ASSERT(!right_reg.is(ebx));
                                           2829    __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, ebx);
2830 DeoptimizeIf(not_equal, instr->environment());
2831 __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
2832
2833 __ bind(&call);
2834 __ PrepareCallCFunction(4, ebx);
2835 __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2836 __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
2837 __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
2838 4);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002839 }
2840
Ben Murdoch85b71792012-04-11 18:30:58 +01002841 // Return value is in st(0) on ia32.
2842 // Store it into the (fixed) result register.
2843 __ sub(Operand(esp), Immediate(kDoubleSize));
2844 __ fstp_d(Operand(esp, 0));
2845 __ movdbl(result_reg, Operand(esp, 0));
2846 __ add(Operand(esp), Immediate(kDoubleSize));
Ben Murdoch5d4cdbf2012-04-11 10:23:59 +01002847}
2848
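// DoPower dispatches on the exponent representation: a double or an int32
// exponent is passed straight to the matching C helper (power_double_double
// or power_double_int), while a tagged exponent is either untagged as a smi
// or loaded from a heap number (anything else deoptimizes). All three paths
// return the result in x87 st(0), which is spilled to the stack and reloaded
// into the fixed XMM result register.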
2849
Ben Murdochb0fe1622011-05-05 13:52:32 +01002850void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002851 ASSERT(instr->value()->Equals(instr->result()));
2852 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002853 Label positive, done, zero;
2854 __ xorps(xmm0, xmm0);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002855 __ ucomisd(input_reg, xmm0);
Ben Murdoch257744e2011-11-30 15:57:28 +00002856 __ j(above, &positive, Label::kNear);
2857 __ j(equal, &zero, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002858 ExternalReference nan =
2859 ExternalReference::address_of_canonical_non_hole_nan();
Ben Murdoch8b112d22011-06-08 16:22:53 +01002860 __ movdbl(input_reg, Operand::StaticVariable(nan));
Ben Murdoch257744e2011-11-30 15:57:28 +00002861 __ jmp(&done, Label::kNear);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002862 __ bind(&zero);
2863 __ push(Immediate(0xFFF00000));
2864 __ push(Immediate(0));
2865 __ movdbl(input_reg, Operand(esp, 0));
2866 __ add(Operand(esp), Immediate(kDoubleSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00002867 __ jmp(&done, Label::kNear);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002868 __ bind(&positive);
2869 __ fldln2();
2870 __ sub(Operand(esp), Immediate(kDoubleSize));
2871 __ movdbl(Operand(esp, 0), input_reg);
2872 __ fld_d(Operand(esp, 0));
2873 __ fyl2x();
2874 __ fstp_d(Operand(esp, 0));
2875 __ movdbl(input_reg, Operand(esp, 0));
2876 __ add(Operand(esp), Immediate(kDoubleSize));
2877 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002878}
2879
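// Math.log handles the non-positive inputs itself: zero yields -Infinity
// (the 0xFFF00000:00000000 bit pattern assembled on the stack above) and
// negative inputs yield the canonical NaN. Positive inputs use the x87 pair
// fldln2 / fyl2x, which computes ln(2) * log2(x), i.e. ln(x).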
2880
2881void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2882 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2883 TranscendentalCacheStub stub(TranscendentalCache::COS,
2884 TranscendentalCacheStub::UNTAGGED);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002885 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002886}
2887
2888
2889void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2890 ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2891 TranscendentalCacheStub stub(TranscendentalCache::SIN,
2892 TranscendentalCacheStub::UNTAGGED);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002893 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002894}
2895
2896
2897void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2898 switch (instr->op()) {
2899 case kMathAbs:
2900 DoMathAbs(instr);
2901 break;
2902 case kMathFloor:
2903 DoMathFloor(instr);
2904 break;
2905 case kMathRound:
2906 DoMathRound(instr);
2907 break;
2908 case kMathSqrt:
2909 DoMathSqrt(instr);
2910 break;
Ben Murdoch85b71792012-04-11 18:30:58 +01002911 case kMathPowHalf:
2912 DoMathPowHalf(instr);
2913 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002914 case kMathCos:
2915 DoMathCos(instr);
2916 break;
2917 case kMathSin:
2918 DoMathSin(instr);
2919 break;
2920 case kMathLog:
2921 DoMathLog(instr);
2922 break;
2923
2924 default:
2925 UNREACHABLE();
2926 }
2927}
2928
2929
Ben Murdoch257744e2011-11-30 15:57:28 +00002930void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
2931 ASSERT(ToRegister(instr->context()).is(esi));
2932 ASSERT(ToRegister(instr->function()).is(edi));
2933 ASSERT(instr->HasPointerMap());
2934 ASSERT(instr->HasDeoptimizationEnvironment());
2935 LPointerMap* pointers = instr->pointer_map();
Ben Murdoch257744e2011-11-30 15:57:28 +00002936 RecordPosition(pointers->position());
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002937 SafepointGenerator generator(
2938 this, pointers, Safepoint::kLazyDeopt);
Ben Murdoch257744e2011-11-30 15:57:28 +00002939 ParameterCount count(instr->arity());
2940 __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
2941}
2942
2943
Ben Murdochb0fe1622011-05-05 13:52:32 +01002944void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002945 ASSERT(ToRegister(instr->context()).is(esi));
2946 ASSERT(ToRegister(instr->key()).is(ecx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002947 ASSERT(ToRegister(instr->result()).is(eax));
2948
2949 int arity = instr->arity();
Ben Murdoch589d6972011-11-30 16:04:58 +00002950 Handle<Code> ic =
2951 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002952 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002953}
2954
2955
2956void LCodeGen::DoCallNamed(LCallNamed* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002957 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002958 ASSERT(ToRegister(instr->result()).is(eax));
2959
2960 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00002961 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
2962 Handle<Code> ic =
Ben Murdoch589d6972011-11-30 16:04:58 +00002963 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002964 __ mov(ecx, instr->name());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002965 CallCode(ic, mode, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002966}
2967
2968
2969void LCodeGen::DoCallFunction(LCallFunction* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002970 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002971 ASSERT(ToRegister(instr->result()).is(eax));
2972
2973 int arity = instr->arity();
Ben Murdoch85b71792012-04-11 18:30:58 +01002974 CallFunctionStub stub(arity, RECEIVER_MIGHT_BE_IMPLICIT);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002975 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdoch85b71792012-04-11 18:30:58 +01002976 __ Drop(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002977}
2978
2979
2980void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002981 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002982 ASSERT(ToRegister(instr->result()).is(eax));
2983
2984 int arity = instr->arity();
Ben Murdoch257744e2011-11-30 15:57:28 +00002985 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
2986 Handle<Code> ic =
Ben Murdoch589d6972011-11-30 16:04:58 +00002987 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002988 __ mov(ecx, instr->name());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002989 CallCode(ic, mode, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002990}
2991
2992
2993void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2994 ASSERT(ToRegister(instr->result()).is(eax));
Ben Murdoch85b71792012-04-11 18:30:58 +01002995 __ mov(edi, instr->target());
Ben Murdoch257744e2011-11-30 15:57:28 +00002996 CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002997}
2998
2999
3000void LCodeGen::DoCallNew(LCallNew* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003001 ASSERT(ToRegister(instr->context()).is(esi));
3002 ASSERT(ToRegister(instr->constructor()).is(edi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003003 ASSERT(ToRegister(instr->result()).is(eax));
3004
Ben Murdoch85b71792012-04-11 18:30:58 +01003005 Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003006 __ Set(eax, Immediate(instr->arity()));
Ben Murdoch85b71792012-04-11 18:30:58 +01003007 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003008}
3009
3010
3011void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003012 CallRuntime(instr->function(), instr->arity(), instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003013}
3014
3015
3016void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
3017 Register object = ToRegister(instr->object());
3018 Register value = ToRegister(instr->value());
3019 int offset = instr->offset();
3020
3021 if (!instr->transition().is_null()) {
3022 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
3023 }
3024
3025 // Do the store.
3026 if (instr->is_in_object()) {
3027 __ mov(FieldOperand(object, offset), value);
Ben Murdoch85b71792012-04-11 18:30:58 +01003028 if (instr->needs_write_barrier()) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003029 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003030 // Update the write barrier for the object for in-object properties.
Ben Murdoch85b71792012-04-11 18:30:58 +01003031 __ RecordWrite(object, offset, value, temp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003032 }
3033 } else {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003034 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003035 __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
3036 __ mov(FieldOperand(temp, offset), value);
Ben Murdoch85b71792012-04-11 18:30:58 +01003037 if (instr->needs_write_barrier()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003038 // Update the write barrier for the properties array.
3039 // object is used as a scratch register.
Ben Murdoch85b71792012-04-11 18:30:58 +01003040 __ RecordWrite(temp, offset, value, object);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003041 }
3042 }
3043}
3044
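// A named store with a known field layout writes either directly into the
// object (in-object property) or into its out-of-object properties array.
// When the instruction carries a transition, the object's map is updated
// first, and a write barrier is recorded only when needs_write_barrier()
// says it cannot be omitted.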
3045
3046void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003047 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003048 ASSERT(ToRegister(instr->object()).is(edx));
3049 ASSERT(ToRegister(instr->value()).is(eax));
3050
3051 __ mov(ecx, instr->name());
Ben Murdoch85b71792012-04-11 18:30:58 +01003052 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003053 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3054 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003055 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003056}
3057
3058
3059void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003060 if (instr->index()->IsConstantOperand()) {
3061 __ cmp(ToOperand(instr->length()),
Ben Murdoch85b71792012-04-11 18:30:58 +01003062 ToImmediate(LConstantOperand::cast(instr->index())));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003063 DeoptimizeIf(below_equal, instr->environment());
3064 } else {
3065 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
3066 DeoptimizeIf(above_equal, instr->environment());
3067 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003068}
3069
3070
Steve Block44f0eee2011-05-26 01:26:41 +01003071void LCodeGen::DoStoreKeyedSpecializedArrayElement(
3072 LStoreKeyedSpecializedArrayElement* instr) {
Ben Murdoch589d6972011-11-30 16:04:58 +00003073 ElementsKind elements_kind = instr->elements_kind();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003074 Operand operand(BuildFastArrayOperand(instr->external_pointer(),
3075 instr->key(), elements_kind, 0));
Ben Murdoch589d6972011-11-30 16:04:58 +00003076 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
Steve Block44f0eee2011-05-26 01:26:41 +01003077 __ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003078 __ movss(operand, xmm0);
Ben Murdoch589d6972011-11-30 16:04:58 +00003079 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003080 __ movdbl(operand, ToDoubleRegister(instr->value()));
Steve Block44f0eee2011-05-26 01:26:41 +01003081 } else {
3082 Register value = ToRegister(instr->value());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003083 switch (elements_kind) {
Ben Murdoch589d6972011-11-30 16:04:58 +00003084 case EXTERNAL_PIXEL_ELEMENTS:
3085 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3086 case EXTERNAL_BYTE_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003087 __ mov_b(operand, value);
Steve Block44f0eee2011-05-26 01:26:41 +01003088 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003089 case EXTERNAL_SHORT_ELEMENTS:
3090 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003091 __ mov_w(operand, value);
Steve Block44f0eee2011-05-26 01:26:41 +01003092 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003093 case EXTERNAL_INT_ELEMENTS:
3094 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003095 __ mov(operand, value);
Steve Block44f0eee2011-05-26 01:26:41 +01003096 break;
Ben Murdoch589d6972011-11-30 16:04:58 +00003097 case EXTERNAL_FLOAT_ELEMENTS:
3098 case EXTERNAL_DOUBLE_ELEMENTS:
3099 case FAST_ELEMENTS:
3100 case FAST_DOUBLE_ELEMENTS:
3101 case DICTIONARY_ELEMENTS:
3102 case NON_STRICT_ARGUMENTS_ELEMENTS:
Steve Block44f0eee2011-05-26 01:26:41 +01003103 UNREACHABLE();
3104 break;
3105 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003106 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003107}
3108
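// Stores into external (typed) arrays are keyed by ElementsKind: float32
// elements are narrowed with cvtsd2ss, float64 elements are stored as-is,
// and the integer kinds use a byte, word or dword move of matching width.
// The remaining kinds can never reach this instruction, hence UNREACHABLE().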
3109
Ben Murdochb0fe1622011-05-05 13:52:32 +01003110void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3111 Register value = ToRegister(instr->value());
3112 Register elements = ToRegister(instr->object());
3113 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3114
3115 // Do the store.
3116 if (instr->key()->IsConstantOperand()) {
3117 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3118 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3119 int offset =
3120 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
3121 __ mov(FieldOperand(elements, offset), value);
3122 } else {
Steve Block1e0659c2011-05-24 12:43:12 +01003123 __ mov(FieldOperand(elements,
3124 key,
3125 times_pointer_size,
3126 FixedArray::kHeaderSize),
Ben Murdochb0fe1622011-05-05 13:52:32 +01003127 value);
3128 }
3129
Ben Murdochb0fe1622011-05-05 13:52:32 +01003130 if (instr->hydrogen()->NeedsWriteBarrier()) {
3131 // Compute address of modified element and store it into key register.
Steve Block1e0659c2011-05-24 12:43:12 +01003132 __ lea(key,
3133 FieldOperand(elements,
3134 key,
3135 times_pointer_size,
3136 FixedArray::kHeaderSize));
Ben Murdoch85b71792012-04-11 18:30:58 +01003137 __ RecordWrite(elements, key, value);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003138 }
3139}
3140
3141
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003142void LCodeGen::DoStoreKeyedFastDoubleElement(
3143 LStoreKeyedFastDoubleElement* instr) {
3144 XMMRegister value = ToDoubleRegister(instr->value());
3145 Label have_value;
3146
3147 __ ucomisd(value, value);
                                           3148  __ j(parity_odd, &have_value);  // Not NaN: the value can be stored as is.
3149
3150 ExternalReference canonical_nan_reference =
3151 ExternalReference::address_of_canonical_non_hole_nan();
3152 __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
3153 __ bind(&have_value);
3154
3155 Operand double_store_operand = BuildFastArrayOperand(
Ben Murdoch589d6972011-11-30 16:04:58 +00003156 instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003157 FixedDoubleArray::kHeaderSize - kHeapObjectTag);
3158 __ movdbl(double_store_operand, value);
3159}
3160
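// Before a store into a FAST_DOUBLE_ELEMENTS backing store, NaNs are
// canonicalized: ucomisd(value, value) is unordered only for NaN, in which
// case the canonical non-hole NaN is stored instead. This presumably keeps
// arbitrary NaN bit patterns, which could collide with the hole encoding,
// out of the double array.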
3161
Ben Murdochb0fe1622011-05-05 13:52:32 +01003162void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003163 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003164 ASSERT(ToRegister(instr->object()).is(edx));
3165 ASSERT(ToRegister(instr->key()).is(ecx));
3166 ASSERT(ToRegister(instr->value()).is(eax));
3167
Ben Murdoch85b71792012-04-11 18:30:58 +01003168 Handle<Code> ic = instr->strict_mode()
Steve Block44f0eee2011-05-26 01:26:41 +01003169 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3170 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003171 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003172}
3173
3174
Steve Block1e0659c2011-05-24 12:43:12 +01003175void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3176 class DeferredStringCharCodeAt: public LDeferredCode {
3177 public:
3178 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3179 : LDeferredCode(codegen), instr_(instr) { }
3180 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3181 private:
3182 LStringCharCodeAt* instr_;
3183 };
3184
Ben Murdoch85b71792012-04-11 18:30:58 +01003185 Register string = ToRegister(instr->string());
3186 Register index = ToRegister(instr->index());
3187 Register result = ToRegister(instr->result());
3188
Steve Block1e0659c2011-05-24 12:43:12 +01003189 DeferredStringCharCodeAt* deferred =
3190 new DeferredStringCharCodeAt(this, instr);
3191
Ben Murdoch85b71792012-04-11 18:30:58 +01003192 // Fetch the instance type of the receiver into result register.
3193 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
3194 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
3195
3196 // We need special handling for indirect strings.
3197 Label check_sequential;
3198 __ test(result, Immediate(kIsIndirectStringMask));
3199 __ j(zero, &check_sequential, Label::kNear);
3200
3201 // Dispatch on the indirect string shape: slice or cons.
3202 Label cons_string;
3203 __ test(result, Immediate(kSlicedNotConsMask));
3204 __ j(zero, &cons_string, Label::kNear);
3205
3206 // Handle slices.
3207 Label indirect_string_loaded;
3208 __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
3209 __ SmiUntag(result);
3210 __ add(index, Operand(result));
3211 __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
3212 __ jmp(&indirect_string_loaded, Label::kNear);
3213
3214 // Handle conses.
3215 // Check whether the right hand side is the empty string (i.e. if
3216 // this is really a flat string in a cons string). If that is not
3217 // the case we would rather go to the runtime system now to flatten
3218 // the string.
3219 __ bind(&cons_string);
3220 __ cmp(FieldOperand(string, ConsString::kSecondOffset),
3221 Immediate(factory()->empty_string()));
3222 __ j(not_equal, deferred->entry());
3223 __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
3224
3225 __ bind(&indirect_string_loaded);
3226 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
3227 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
3228
3229 // Check whether the string is sequential. The only non-sequential
3230 // shapes we support have just been unwrapped above.
3231 __ bind(&check_sequential);
3232 STATIC_ASSERT(kSeqStringTag == 0);
3233 __ test(result, Immediate(kStringRepresentationMask));
3234 __ j(not_zero, deferred->entry());
3235
3236 // Dispatch on the encoding: ASCII or two-byte.
3237 Label ascii_string;
3238 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
3239 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3240 __ test(result, Immediate(kStringEncodingMask));
3241 __ j(not_zero, &ascii_string, Label::kNear);
3242
3243 // Two-byte string.
3244 // Load the two-byte character code into the result register.
3245 Label done;
3246 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3247 __ movzx_w(result, FieldOperand(string,
3248 index,
3249 times_2,
3250 SeqTwoByteString::kHeaderSize));
3251 __ jmp(&done, Label::kNear);
3252
3253 // ASCII string.
3254 // Load the byte into the result register.
3255 __ bind(&ascii_string);
3256 __ movzx_b(result, FieldOperand(string,
3257 index,
3258 times_1,
3259 SeqAsciiString::kHeaderSize));
3260 __ bind(&done);
Steve Block1e0659c2011-05-24 12:43:12 +01003261 __ bind(deferred->exit());
3262}
3263
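// The charCodeAt sequence above first unwraps indirect string shapes: a
// sliced string contributes its offset and parent, and a cons string is
// followed into its first part (only when its second part is the empty
// string). Once a sequential string is in hand the character is loaded as a
// byte (ASCII) or a 16-bit unit (two-byte); every other shape falls back to
// the deferred runtime call.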
3264
3265void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3266 Register string = ToRegister(instr->string());
3267 Register result = ToRegister(instr->result());
3268
3269 // TODO(3095996): Get rid of this. For now, we need to make the
3270 // result register contain a valid pointer because it is already
3271 // contained in the register pointer map.
3272 __ Set(result, Immediate(0));
3273
Ben Murdoch8b112d22011-06-08 16:22:53 +01003274 PushSafepointRegistersScope scope(this);
Steve Block1e0659c2011-05-24 12:43:12 +01003275 __ push(string);
3276 // Push the index as a smi. This is safe because of the checks in
3277 // DoStringCharCodeAt above.
3278 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3279 if (instr->index()->IsConstantOperand()) {
3280 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3281 __ push(Immediate(Smi::FromInt(const_index)));
3282 } else {
3283 Register index = ToRegister(instr->index());
3284 __ SmiTag(index);
3285 __ push(index);
3286 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003287 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
3288 instr, instr->context());
Steve Block1e0659c2011-05-24 12:43:12 +01003289 if (FLAG_debug_code) {
3290 __ AbortIfNotSmi(eax);
3291 }
3292 __ SmiUntag(eax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003293 __ StoreToSafepointRegisterSlot(result, eax);
Steve Block1e0659c2011-05-24 12:43:12 +01003294}
3295
3296
Steve Block44f0eee2011-05-26 01:26:41 +01003297void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3298 class DeferredStringCharFromCode: public LDeferredCode {
3299 public:
3300 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3301 : LDeferredCode(codegen), instr_(instr) { }
3302 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3303 private:
3304 LStringCharFromCode* instr_;
3305 };
3306
3307 DeferredStringCharFromCode* deferred =
3308 new DeferredStringCharFromCode(this, instr);
3309
3310 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3311 Register char_code = ToRegister(instr->char_code());
3312 Register result = ToRegister(instr->result());
3313 ASSERT(!char_code.is(result));
3314
3315 __ cmp(char_code, String::kMaxAsciiCharCode);
3316 __ j(above, deferred->entry());
3317 __ Set(result, Immediate(factory()->single_character_string_cache()));
3318 __ mov(result, FieldOperand(result,
3319 char_code, times_pointer_size,
3320 FixedArray::kHeaderSize));
3321 __ cmp(result, factory()->undefined_value());
3322 __ j(equal, deferred->entry());
3323 __ bind(deferred->exit());
3324}
3325
3326
3327void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3328 Register char_code = ToRegister(instr->char_code());
3329 Register result = ToRegister(instr->result());
3330
3331 // TODO(3095996): Get rid of this. For now, we need to make the
3332 // result register contain a valid pointer because it is already
3333 // contained in the register pointer map.
3334 __ Set(result, Immediate(0));
3335
Ben Murdoch8b112d22011-06-08 16:22:53 +01003336 PushSafepointRegistersScope scope(this);
Steve Block44f0eee2011-05-26 01:26:41 +01003337 __ SmiTag(char_code);
3338 __ push(char_code);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003339 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
Steve Block44f0eee2011-05-26 01:26:41 +01003340 __ StoreToSafepointRegisterSlot(result, eax);
Steve Block44f0eee2011-05-26 01:26:41 +01003341}
3342
3343
Steve Block1e0659c2011-05-24 12:43:12 +01003344void LCodeGen::DoStringLength(LStringLength* instr) {
3345 Register string = ToRegister(instr->string());
3346 Register result = ToRegister(instr->result());
3347 __ mov(result, FieldOperand(string, String::kLengthOffset));
3348}
3349
3350
Ben Murdoch257744e2011-11-30 15:57:28 +00003351void LCodeGen::DoStringAdd(LStringAdd* instr) {
Ben Murdoch85b71792012-04-11 18:30:58 +01003352 if (instr->left()->IsConstantOperand()) {
3353 __ push(ToImmediate(instr->left()));
3354 } else {
3355 __ push(ToOperand(instr->left()));
3356 }
3357 if (instr->right()->IsConstantOperand()) {
3358 __ push(ToImmediate(instr->right()));
3359 } else {
3360 __ push(ToOperand(instr->right()));
3361 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003362 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003363 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdoch257744e2011-11-30 15:57:28 +00003364}
3365
3366
Ben Murdochb0fe1622011-05-05 13:52:32 +01003367void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003368 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003369 ASSERT(input->IsRegister() || input->IsStackSlot());
3370 LOperand* output = instr->result();
3371 ASSERT(output->IsDoubleRegister());
3372 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3373}
3374
3375
3376void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3377 class DeferredNumberTagI: public LDeferredCode {
3378 public:
3379 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3380 : LDeferredCode(codegen), instr_(instr) { }
3381 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3382 private:
3383 LNumberTagI* instr_;
3384 };
3385
Ben Murdochb8e0da22011-05-16 14:20:40 +01003386 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003387 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3388 Register reg = ToRegister(input);
3389
3390 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
3391 __ SmiTag(reg);
3392 __ j(overflow, deferred->entry());
3393 __ bind(deferred->exit());
3394}
3395
3396
3397void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3398 Label slow;
Ben Murdochb8e0da22011-05-16 14:20:40 +01003399 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003400 Register tmp = reg.is(eax) ? ecx : eax;
3401
3402 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01003403 PushSafepointRegistersScope scope(this);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003404
3405 // There was overflow, so bits 30 and 31 of the original integer
3406 // disagree. Try to allocate a heap number in new space and store
3407 // the value in there. If that fails, call the runtime system.
Ben Murdoch257744e2011-11-30 15:57:28 +00003408 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003409 __ SmiUntag(reg);
3410 __ xor_(reg, 0x80000000);
3411 __ cvtsi2sd(xmm0, Operand(reg));
3412 if (FLAG_inline_new) {
3413 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
Ben Murdoch257744e2011-11-30 15:57:28 +00003414 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003415 }
3416
3417 // Slow case: Call the runtime system to do the number allocation.
3418 __ bind(&slow);
3419
3420 // TODO(3095996): Put a valid pointer value in the stack slot where the result
3421 // register is stored, as this register is in the pointer map, but contains an
3422 // integer value.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003423 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003424 // NumberTagI and NumberTagD use the context from the frame, rather than
3425 // the environment's HContext or HInlinedContext value.
3426 // They only call Runtime::kAllocateHeapNumber.
3427 // The corresponding HChange instructions are added in a phase that does
3428 // not have easy access to the local context.
3429 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3430 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3431 RecordSafepointWithRegisters(
Ben Murdoch2b4ba112012-01-20 14:57:15 +00003432 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003433 if (!reg.is(eax)) __ mov(reg, eax);
3434
3435 // Done. Put the value in xmm0 into the value of the allocated heap
3436 // number.
3437 __ bind(&done);
3438 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003439 __ StoreToSafepointRegisterSlot(reg, reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003440}
3441
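// The deferred path above runs when SmiTag overflowed, i.e. the int32 value
// does not fit in the 31-bit smi payload. Untagging the overflowed value and
// XORing it with 0x80000000 recovers the original integer, which is then
// boxed in a freshly allocated heap number (falling back to the runtime when
// inline allocation fails).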
3442
3443void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3444 class DeferredNumberTagD: public LDeferredCode {
3445 public:
3446 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3447 : LDeferredCode(codegen), instr_(instr) { }
3448 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3449 private:
3450 LNumberTagD* instr_;
3451 };
3452
Ben Murdochb8e0da22011-05-16 14:20:40 +01003453 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003454 Register reg = ToRegister(instr->result());
Ben Murdochb8e0da22011-05-16 14:20:40 +01003455 Register tmp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003456
3457 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3458 if (FLAG_inline_new) {
3459 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
3460 } else {
3461 __ jmp(deferred->entry());
3462 }
3463 __ bind(deferred->exit());
3464 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
3465}
3466
3467
3468void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3469 // TODO(3095996): Get rid of this. For now, we need to make the
3470 // result register contain a valid pointer because it is already
3471 // contained in the register pointer map.
3472 Register reg = ToRegister(instr->result());
3473 __ Set(reg, Immediate(0));
3474
Ben Murdoch8b112d22011-06-08 16:22:53 +01003475 PushSafepointRegistersScope scope(this);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003476 // NumberTagI and NumberTagD use the context from the frame, rather than
3477 // the environment's HContext or HInlinedContext value.
3478 // They only call Runtime::kAllocateHeapNumber.
3479 // The corresponding HChange instructions are added in a phase that does
3480 // not have easy access to the local context.
3481 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3482 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
Ben Murdoch2b4ba112012-01-20 14:57:15 +00003483 RecordSafepointWithRegisters(
3484 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003485 __ StoreToSafepointRegisterSlot(reg, eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003486}
3487
3488
3489void LCodeGen::DoSmiTag(LSmiTag* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003490 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003491 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3492 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3493 __ SmiTag(ToRegister(input));
3494}
3495
3496
3497void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003498 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003499 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3500 if (instr->needs_check()) {
3501 __ test(ToRegister(input), Immediate(kSmiTagMask));
3502 DeoptimizeIf(not_zero, instr->environment());
3503 }
3504 __ SmiUntag(ToRegister(input));
3505}
3506
3507
3508void LCodeGen::EmitNumberUntagD(Register input_reg,
3509 XMMRegister result_reg,
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003510 bool deoptimize_on_undefined,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003511 LEnvironment* env) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003512 Label load_smi, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003513
3514 // Smi check.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003515 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003516
3517 // Heap number map check.
3518 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003519 factory()->heap_number_map());
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003520 if (deoptimize_on_undefined) {
3521 DeoptimizeIf(not_equal, env);
3522 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00003523 Label heap_number;
3524 __ j(equal, &heap_number, Label::kNear);
3525
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003526 __ cmp(input_reg, factory()->undefined_value());
3527 DeoptimizeIf(not_equal, env);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003528
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003529 // Convert undefined to NaN.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003530 ExternalReference nan =
3531 ExternalReference::address_of_canonical_non_hole_nan();
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003532 __ movdbl(result_reg, Operand::StaticVariable(nan));
Ben Murdoch257744e2011-11-30 15:57:28 +00003533 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003534
Ben Murdoch7d3e7fc2011-07-12 16:37:06 +01003535 __ bind(&heap_number);
3536 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003537 // Heap number to XMM conversion.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003538 __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003539 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003540
3541 // Smi to XMM conversion
3542 __ bind(&load_smi);
3543 __ SmiUntag(input_reg); // Untag smi before converting to float.
3544 __ cvtsi2sd(result_reg, Operand(input_reg));
3545 __ SmiTag(input_reg); // Retag smi.
3546 __ bind(&done);
3547}
3548
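// EmitNumberUntagD turns a tagged value into an XMM double: smis are
// untagged, converted with cvtsi2sd and retagged, heap numbers are loaded
// directly, and undefined either deoptimizes or becomes the canonical NaN,
// depending on deoptimize_on_undefined. Any other value deoptimizes.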
3549
Ben Murdoch85b71792012-04-11 18:30:58 +01003550class DeferredTaggedToI: public LDeferredCode {
3551 public:
3552 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3553 : LDeferredCode(codegen), instr_(instr) { }
3554 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3555 private:
3556 LTaggedToI* instr_;
3557};
3558
3559
Ben Murdochb0fe1622011-05-05 13:52:32 +01003560void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003561 Label done, heap_number;
Ben Murdochb8e0da22011-05-16 14:20:40 +01003562 Register input_reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003563
3564 // Heap number map check.
3565 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003566 factory()->heap_number_map());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003567
3568 if (instr->truncating()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00003569 __ j(equal, &heap_number, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003570 // Check for undefined. Undefined is converted to zero for truncating
3571 // conversions.
Steve Block44f0eee2011-05-26 01:26:41 +01003572 __ cmp(input_reg, factory()->undefined_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003573 DeoptimizeIf(not_equal, instr->environment());
3574 __ mov(input_reg, 0);
Ben Murdoch257744e2011-11-30 15:57:28 +00003575 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003576
3577 __ bind(&heap_number);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003578 if (CpuFeatures::IsSupported(SSE3)) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003579 CpuFeatures::Scope scope(SSE3);
Ben Murdoch257744e2011-11-30 15:57:28 +00003580 Label convert;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003581 // Use more powerful conversion when sse3 is available.
3582 // Load x87 register with heap number.
3583 __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
3584 // Get exponent alone and check for too-big exponent.
3585 __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3586 __ and_(input_reg, HeapNumber::kExponentMask);
3587 const uint32_t kTooBigExponent =
3588 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3589 __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
Ben Murdoch257744e2011-11-30 15:57:28 +00003590 __ j(less, &convert, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003591 // Pop FPU stack before deoptimizing.
Ben Murdoch85b71792012-04-11 18:30:58 +01003592 __ ffree(0);
3593 __ fincstp();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003594 DeoptimizeIf(no_condition, instr->environment());
3595
3596 // Reserve space for 64 bit answer.
3597 __ bind(&convert);
3598 __ sub(Operand(esp), Immediate(kDoubleSize));
3599 // Do conversion, which cannot fail because we checked the exponent.
3600 __ fisttp_d(Operand(esp, 0));
3601 __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3602 __ add(Operand(esp), Immediate(kDoubleSize));
3603 } else {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003604 XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003605 __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3606 __ cvttsd2si(input_reg, Operand(xmm0));
3607 __ cmp(input_reg, 0x80000000u);
3608 __ j(not_equal, &done);
                                           3609      // Check if the input was 0x80000000 (kMinInt).
                                           3610      // If not, then we got an overflow and we deoptimize.
      ExternalReference min_int = ExternalReference::address_of_min_int();
      __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
      __ ucomisd(xmm_temp, xmm0);
      DeoptimizeIf(not_equal, instr->environment());
      DeoptimizeIf(parity_even, instr->environment());  // NaN.
    }
  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(not_equal, instr->environment());

    XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
    __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, Operand(xmm0));
    __ cvtsi2sd(xmm_temp, Operand(input_reg));
    __ ucomisd(xmm0, xmm_temp);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      __ test(input_reg, Operand(input_reg));
      __ j(not_zero, &done);
      __ movmskpd(input_reg, xmm0);
      __ and_(input_reg, 1);
      DeoptimizeIf(not_zero, instr->environment());
    }
  }
  __ bind(&done);
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ JumpIfNotSmi(input_reg, deferred->entry());

  // Smi to int32 conversion
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  XMMRegister result_reg = ToDoubleRegister(result);

  EmitNumberUntagD(input_reg, result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   instr->environment());
}


void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsDoubleRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsRegister());

  XMMRegister input_reg = ToDoubleRegister(input);
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    __ cvttsd2si(result_reg, Operand(input_reg));
    __ cmp(result_reg, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE3)) {
      // This will deoptimize if the exponent of the input is out of range.
      CpuFeatures::Scope scope(SSE3);
      Label convert, done;
      __ j(not_equal, &done, Label::kNear);
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ movdbl(Operand(esp, 0), input_reg);
      // Get exponent alone and check for too-big exponent.
      __ mov(result_reg, Operand(esp, sizeof(int32_t)));
      __ and_(result_reg, HeapNumber::kExponentMask);
      const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
      __ j(less, &convert, Label::kNear);
      __ add(Operand(esp), Immediate(kDoubleSize));
      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&convert);
      // Do conversion, which cannot fail because we checked the exponent.
      __ fld_d(Operand(esp, 0));
      __ fisttp_d(Operand(esp, 0));
      __ mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
      __ add(Operand(esp), Immediate(kDoubleSize));
      __ bind(&done);
    } else {
      Label done;
      Register temp_reg = ToRegister(instr->TempAt(0));
      XMMRegister xmm_scratch = xmm0;

      // If cvttsd2si succeeded, we're done. Otherwise, we attempt
      // manual conversion.
      __ j(not_equal, &done, Label::kNear);

      // Get high 32 bits of the input in result_reg and temp_reg.
      __ pshufd(xmm_scratch, input_reg, 1);
      __ movd(Operand(temp_reg), xmm_scratch);
      __ mov(result_reg, temp_reg);

      // Prepare negation mask in temp_reg.
      __ sar(temp_reg, kBitsPerInt - 1);

      // Extract the exponent from result_reg and subtract the adjusted
      // bias from it. The adjustment is chosen so that when the difference
      // is zero, the answer is in the low 32 bits of the input; otherwise a
      // shift has to be performed.
      __ shr(result_reg, HeapNumber::kExponentShift);
      __ and_(result_reg,
              HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
      __ sub(Operand(result_reg),
             Immediate(HeapNumber::kExponentBias +
                       HeapNumber::kExponentBits +
                       HeapNumber::kMantissaBits));
      // Don't handle big (> kMantissaBits + kExponentBits == 63) or
      // special exponents.
      DeoptimizeIf(greater, instr->environment());
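      // At this point result_reg holds (unbiased exponent - 63). The code
      // below aligns the mantissa, with its implicit leading 1 restored, to
      // bit 63 of a 64-bit value, so shifting that value right by
      // (63 - exponent) puts the integer part in the low bits; the low 32
      // bits are the truncated result. For example, 6.5 has exponent 2,
      // the shift amount is 61, and the extracted low word is 6.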

      // Zero out the sign and the exponent in the input (by shifting
      // it to the left) and restore the implicit mantissa bit,
      // i.e. convert the input to unsigned int64 shifted left by
      // kExponentBits.
      ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
      // Minus zero has the most significant bit set and the other
      // bits cleared.
      __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
      __ psllq(input_reg, HeapNumber::kExponentBits);
      __ por(input_reg, xmm_scratch);

      // Get the amount to shift the input right in xmm_scratch.
      __ neg(result_reg);
      __ movd(xmm_scratch, Operand(result_reg));

      // Shift the input right and extract low 32 bits.
      __ psrlq(input_reg, xmm_scratch);
      __ movd(Operand(result_reg), input_reg);

      // Use the prepared mask in temp_reg to negate the result if necessary.
      __ xor_(result_reg, Operand(temp_reg));
      __ sub(result_reg, Operand(temp_reg));
      __ bind(&done);
    }
  } else {
    Label done;
    __ cvttsd2si(result_reg, Operand(input_reg));
    __ cvtsi2sd(xmm0, Operand(result_reg));
    __ ucomisd(xmm0, input_reg);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // The integer converted back is equal to the original. We
      // only have to test if we got -0 as an input.
      __ test(result_reg, Operand(result_reg));
      __ j(not_zero, &done, Label::kNear);
      __ movmskpd(result_reg, input_reg);
      // Bit 0 contains the sign of the double in input_reg.
      // If input was positive, we are ok and return 0; otherwise
      // deoptimize.
      __ and_(result_reg, 1);
      DeoptimizeIf(not_zero, instr->environment());
    }
    __ bind(&done);
  }
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ test(ToOperand(input), Immediate(kSmiTagMask));
  DeoptimizeIf(not_zero, instr->environment());
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  LOperand* input = instr->InputAt(0);
  __ test(ToOperand(input), Immediate(kSmiTagMask));
  DeoptimizeIf(zero, instr->environment());
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
            static_cast<int8_t>(first));

    // If there is only one type in the interval, check for equality.
    if (first == last) {
      DeoptimizeIf(not_equal, instr->environment());
    } else {
      DeoptimizeIf(below, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
                static_cast<int8_t>(last));
        DeoptimizeIf(above, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
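      // With a single-bit mask the tag is either 0 or equal to the mask,
      // so testing that one bit is enough; no masked compare is needed.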
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask);
      DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment());
    } else {
      __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
      __ and_(temp, mask);
      __ cmpb(Operand(temp), tag);
      DeoptimizeIf(not_equal, instr->environment());
    }
  }
}


void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->InputAt(0)->IsRegister());
  Operand operand = ToOperand(instr->InputAt(0));
  __ cmp(operand, instr->hydrogen()->target());
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoCheckMap(LCheckMap* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
         instr->hydrogen()->map());
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
}


void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register value_reg = ToRegister(instr->result());
  __ ClampUint8(value_reg);
}


void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register input_reg = ToRegister(instr->unclamped());
  Label is_smi, done, heap_number;

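  // The input is a tagged value: a smi is untagged and clamped directly,
  // a heap number is clamped via ClampDoubleToUint8, and undefined clamps
  // to zero. Any other value deoptimizes.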
  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, factory()->undefined_value());
  DeoptimizeIf(not_equal, instr->environment());
  __ mov(input_reg, 0);
  __ jmp(&done, Label::kNear);

  // Heap number
  __ bind(&heap_number);
  __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(xmm0, xmm1, input_reg);
  __ jmp(&done, Label::kNear);

  // smi
  __ bind(&is_smi);
  __ SmiUntag(input_reg);
  __ ClampUint8(input_reg);

  __ bind(&done);
}


void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
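    // The object may still move while it is in new space, so rather than
    // embedding its address in the code, the code loads it indirectly
    // through a global property cell whose value the GC keeps up to date.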
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    __ mov(result, Operand::Cell(cell));
  } else {
    __ mov(result, object);
  }
}


void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register reg = ToRegister(instr->TempAt(0));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadHeapObject(reg, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
           Handle<Map>(current_prototype->map()));
    DeoptimizeIf(not_equal, instr->environment());
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadHeapObject(reg, current_prototype);
  }

  // Check the holder map.
  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Handle<Map>(current_prototype->map()));
  DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  // Set up the parameters to the stub/runtime call.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->constant_elements()));

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
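  // Copy-on-write literals and shallow (depth 1) literals up to the stub's
  // maximum cloned length go through FastCloneShallowArrayStub; everything
  // else falls back to the runtime.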
  if (instr->hydrogen()->IsCopyOnWrite()) {
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}


void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  // Set up the parameters to the stub/runtime call.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->constant_properties()));
  int flags = instr->hydrogen()->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= instr->hydrogen()->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}


void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  __ push(eax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  // esi = context.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->pattern()));
  __ push(Immediate(instr->hydrogen()->flags()));
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(ebx, eax);

  __ bind(&materialized);
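  // The materialized regexp boilerplate is cached in the literals array, so
  // the runtime call above normally happens only once per literal; each
  // evaluation below makes a shallow field-by-field copy of it, falling back
  // to a runtime allocation when the inline new-space allocation fails.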
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(
        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ push(Immediate(shared_info));
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
    __ push(Immediate(shared_info));
    __ push(Immediate(pretenure
                      ? factory()->true_value()
                      : factory()->false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  LOperand* input = instr->InputAt(1);
  if (input->IsConstantOperand()) {
    __ push(ToImmediate(input));
  } else {
    __ push(ToOperand(input));
  }
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}


Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  if (type_name->Equals(heap()->number_symbol())) {
    __ JumpIfSmi(input, true_label);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           factory()->heap_number_map());
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label);
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ cmp(input, factory()->true_value());
    __ j(equal, true_label);
    __ cmp(input, factory()->false_value());
    final_branch_condition = equal;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    __ cmp(input, factory()->null_value());
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ cmp(input, factory()->undefined_value());
    __ j(equal, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = not_zero;

  } else if (type_name->Equals(heap()->function_symbol())) {
    STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_CALLABLE_SPEC_OBJECT_TYPE, input);
    final_branch_condition = above_equal;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
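      // Without harmony typeof semantics, null also reports "object";
      // with them, null is handled by its own branch above.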
      __ cmp(input, factory()->null_value());
      __ j(equal, true_label);
    }
    __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
    __ j(below, false_label);
    __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, false_label);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else {
    final_branch_condition = not_equal;
    __ jmp(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp);
  EmitBranch(true_block, false_block, equal);
}


void LCodeGen::EmitIsConstructCall(Register temp) {
  // Get the frame pointer for the calling frame.
  __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker, Label::kNear);
  __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
}


void LCodeGen::EnsureSpaceForLazyDeopt() {
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  int patch_size = Deoptimizer::patch_size();
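  // If fewer than patch_size bytes have been emitted since the previous
  // lazy-deopt point, pad with nops so the call sequences patched in during
  // deoptimization cannot overlap.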
  if (current_pc < last_lazy_deopt_pc_ + patch_size) {
    int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
    while (padding_size-- > 0) {
      __ nop();
    }
  }
  last_lazy_deopt_pc_ = masm()->pc_offset();
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}


void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  __ push(ToOperand(obj));
  if (key->IsConstantOperand()) {
    __ push(ToImmediate(key));
  } else {
    __ push(ToOperand(key));
  }
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  // Create safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin)
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ push(Immediate(Smi::FromInt(strict_mode_flag())));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}


void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
   private:
    LStackCheck* instr_;
  };

  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &done, Label::kNear);

    ASSERT(instr->context()->IsRegister());
    ASSERT(ToRegister(instr->context()).is(esi));
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    EnsureSpaceForLazyDeopt();
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(below, deferred_stack_check->entry());
    EnsureSpaceForLazyDeopt();
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting the call and the safepoint
    // in the deferred code.
  }
}


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}


void LCodeGen::DoIn(LIn* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  if (key->IsConstantOperand()) {
    __ push(ToImmediate(key));
  } else {
    __ push(ToOperand(key));
  }
  if (obj->IsConstantOperand()) {
    __ push(ToImmediate(obj));
  } else {
    __ push(ToOperand(obj));
  }
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32